Diffstat (limited to 'src/main/java/org')
-rw-r--r--  src/main/java/org/HdrHistogram/ArrayHistogram.java        388
-rw-r--r--  src/main/java/org/HdrHistogram/DirectArrayHistogram.java  234
-rw-r--r--  src/main/java/org/HdrHistogram/DirectMapHistogram.java    492
-rw-r--r--  src/main/java/org/HdrHistogram/HistogramSketch.java        90
-rw-r--r--  src/main/java/org/HdrHistogram/HistogramUnion.java         48
-rw-r--r--  src/main/java/org/HdrHistogram/Histogramer.java            37
-rw-r--r--  src/main/java/org/HdrHistogram/Percentile.java             50
7 files changed, 1339 insertions, 0 deletions
diff --git a/src/main/java/org/HdrHistogram/ArrayHistogram.java b/src/main/java/org/HdrHistogram/ArrayHistogram.java
new file mode 100644
index 0000000..cea72d9
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/ArrayHistogram.java
@@ -0,0 +1,388 @@
+package org.HdrHistogram;
+
+/**
+ * Written by Gil Tene of Azul Systems, and released to the public domain,
+ * as explained at http://creativecommons.org/publicdomain/zero/1.0/
+ *
+ * @author Gil Tene
+ */
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.nio.ByteBuffer;
+import java.util.*;
+import java.util.zip.DataFormatException;
+
+/**
+ * <h3>A High Dynamic Range (HDR) Histogram</h3>
+ * <p>
+ * {@link ArrayHistogram} supports the recording and analyzing of sampled data value counts across a configurable integer value
+ * range with configurable value precision within the range. Value precision is expressed as the number of significant
+ * digits in the value recording, and provides control over value quantization behavior across the value range and the
+ * subsequent value resolution at any given level.
+ * <p>
+ * For example, a Histogram could be configured to track the counts of observed integer values between 0 and
+ * 3,600,000,000 while maintaining a value precision of 3 significant digits across that range. Value quantization
+ * within the range will thus be no larger than 1/1,000th (or 0.1%) of any value. This example Histogram could
+ * be used to track and analyze the counts of observed response times ranging between 1 microsecond and 1 hour
+ * in magnitude, while maintaining a value resolution of 1 microsecond up to 1 millisecond, a resolution of
+ * 1 millisecond (or better) up to one second, and a resolution of 1 second (or better) up to 1,000 seconds. At its
+ * maximum tracked value (1 hour), it would still maintain a resolution of 3.6 seconds (or better).
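+ * <p>
+ * As a minimal illustrative sketch of that example configuration (the recorded value is hypothetical):
+ * <pre>{@code
+ * ArrayHistogram histogram = new ArrayHistogram(3_600_000_000L, 3);
+ * histogram.recordValue(42_000L); // e.g. a 42 ms response time, recorded in microseconds
+ * long p99 = histogram.getValueAtPercentile(99.0);
+ * }</pre>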
+ * <p>
+ * {@link ArrayHistogram} tracks value counts in <b><code>long</code></b> fields. Smaller field types are available in the
+ * {@link IntCountsHistogram} and {@link ShortCountsHistogram} implementations of
+ * {@link AbstractHistogram}.
+ * <p>
+ * Auto-resizing: When constructed with no specified value range (or when auto-resize is turned on with {@link
+ * ArrayHistogram#setAutoResize}) a {@link ArrayHistogram} will auto-resize its dynamic range to include recorded values as
+ * they are encountered. Note that recording calls that cause auto-resizing may take longer to execute, as resizing
+ * incurs allocation and copying of internal data structures.
+ * <p>
+ * See package description for {@link org.HdrHistogram} for details.
+ */
+
+public class ArrayHistogram extends AbstractHistogram implements Histogramer{
+ long totalCount;
+ long[] counts;
+ int normalizingIndexOffset;
+
+ @Override
+ long getCountAtIndex(final int index) {
+ return counts[normalizeIndex(index, normalizingIndexOffset, countsArrayLength)];
+ }
+
+ @Override
+ long getCountAtNormalizedIndex(final int index) {
+ return counts[index];
+ }
+
+ @Override
+ void incrementCountAtIndex(final int index) {
+ counts[normalizeIndex(index, normalizingIndexOffset, countsArrayLength)]++;
+ }
+
+ @Override
+ void addToCountAtIndex(final int index, final long value) {
+        // Normally normalizingIndexOffset = 0, so the index needs no shifting
+ counts[normalizeIndex(index, normalizingIndexOffset, countsArrayLength)] += value;
+ }
+
+ @Override
+ void setCountAtIndex(int index, long value) {
+ counts[normalizeIndex(index, normalizingIndexOffset, countsArrayLength)] = value;
+ }
+
+ @Override
+ void setCountAtNormalizedIndex(int index, long value) {
+ counts[index] = value;
+ }
+
+ @Override
+ int getNormalizingIndexOffset() {
+ return normalizingIndexOffset;
+ }
+
+ @Override
+ void setNormalizingIndexOffset(int normalizingIndexOffset) {
+ this.normalizingIndexOffset = normalizingIndexOffset;
+ }
+
+ @Override
+ void setIntegerToDoubleValueConversionRatio(double integerToDoubleValueConversionRatio) {
+ nonConcurrentSetIntegerToDoubleValueConversionRatio(integerToDoubleValueConversionRatio);
+ }
+
+ @Override
+ void shiftNormalizingIndexByOffset(int offsetToAdd,
+ boolean lowestHalfBucketPopulated,
+ double newIntegerToDoubleValueConversionRatio) {
+ nonConcurrentNormalizingIndexShift(offsetToAdd, lowestHalfBucketPopulated);
+ }
+
+ @Override
+ void clearCounts() {
+ Arrays.fill(counts, 0);
+ totalCount = 0;
+ }
+
+ @Override
+ public Histogramer makeCopy() {
+ return miniCopy();
+ }
+
+ @Override
+ public ArrayHistogram copy() {
+ ArrayHistogram copy = new ArrayHistogram(this);
+ copy.add(this);
+ return copy;
+ }
+
+ public ArrayHistogram miniCopy() {
+        long cappedHighest = maxValue < highestTrackableValue
+                ? Math.max(maxValue, lowestDiscernibleValue * 2)
+                : highestTrackableValue;
+        ArrayHistogram copy = new ArrayHistogram(lowestDiscernibleValue, cappedHighest, numberOfSignificantValueDigits);
+ copy.add(this);
+ return copy;
+ }
+
+ @Override
+ public ArrayHistogram copyCorrectedForCoordinatedOmission(final long expectedIntervalBetweenValueSamples) {
+ ArrayHistogram copy = new ArrayHistogram(this);
+ copy.addWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
+ return copy;
+ }
+
+ @Override
+ public long getTotalCount() {
+ return totalCount;
+ }
+
+ @Override
+ void setTotalCount(final long totalCount) {
+ this.totalCount = totalCount;
+ }
+
+ @Override
+ void incrementTotalCount() {
+ totalCount++;
+ }
+
+ @Override
+ void addToTotalCount(final long value) {
+ totalCount += value;
+ }
+
+ @Override
+ int _getEstimatedFootprintInBytes() {
+ return (512 + (8 * counts.length));
+ }
+
+ @Override
+ void resize(long newHighestTrackableValue) {
+ int oldNormalizedZeroIndex = normalizeIndex(0, normalizingIndexOffset, countsArrayLength);
+
+ establishSize(newHighestTrackableValue);
+
+ int countsDelta = countsArrayLength - counts.length;
+
+ counts = Arrays.copyOf(counts, countsArrayLength);
+
+ if (oldNormalizedZeroIndex != 0) {
+ // We need to shift the stuff from the zero index and up to the end of the array:
+ int newNormalizedZeroIndex = oldNormalizedZeroIndex + countsDelta;
+ int lengthToCopy = (countsArrayLength - countsDelta) - oldNormalizedZeroIndex;
+ System.arraycopy(counts, oldNormalizedZeroIndex, counts, newNormalizedZeroIndex, lengthToCopy);
+ Arrays.fill(counts, oldNormalizedZeroIndex, newNormalizedZeroIndex, 0);
+ }
+ }
+
+ /**
+ * Construct an auto-resizing histogram with a lowest discernible value of 1 and an auto-adjusting
+ * highestTrackableValue. Can auto-resize up to track values up to (Long.MAX_VALUE / 2).
+ *
+ * @param numberOfSignificantValueDigits Specifies the precision to use. This is the number of significant
+ * decimal digits to which the histogram will maintain value resolution
+ * and separation. Must be a non-negative integer between 0 and 5.
+ */
+ public ArrayHistogram(final int numberOfSignificantValueDigits) {
+ this(1, 2, numberOfSignificantValueDigits);
+ setAutoResize(true);
+ }
+
+ /**
+ * Construct a Histogram given the Highest value to be tracked and a number of significant decimal digits. The
+ * histogram will be constructed to implicitly track (distinguish from 0) values as low as 1.
+ *
+ * @param highestTrackableValue The highest value to be tracked by the histogram. Must be a positive
+ * integer that is {@literal >=} 2.
+ * @param numberOfSignificantValueDigits Specifies the precision to use. This is the number of significant
+ * decimal digits to which the histogram will maintain value resolution
+ * and separation. Must be a non-negative integer between 0 and 5.
+ */
+ public ArrayHistogram(final long highestTrackableValue, final int numberOfSignificantValueDigits) {
+ this(1, highestTrackableValue, numberOfSignificantValueDigits);
+ }
+
+ /**
+ * Construct a Histogram given the Lowest and Highest values to be tracked and a number of significant
+     * decimal digits. Providing a lowestDiscernibleValue is useful in situations where the units used
+     * for the histogram's values are much smaller than the minimal accuracy required. E.g. when tracking
+ * time values stated in nanosecond units, where the minimal accuracy required is a microsecond, the
+ * proper value for lowestDiscernibleValue would be 1000.
+ *
+ * @param lowestDiscernibleValue The lowest value that can be discerned (distinguished from 0) by the
+ * histogram. Must be a positive integer that is {@literal >=} 1. May be
+ * internally rounded down to nearest power of 2.
+ * @param highestTrackableValue The highest value to be tracked by the histogram. Must be a positive
+ * integer that is {@literal >=} (2 * lowestDiscernibleValue).
+ * @param numberOfSignificantValueDigits Specifies the precision to use. This is the number of significant
+ * decimal digits to which the histogram will maintain value resolution
+ * and separation. Must be a non-negative integer between 0 and 5.
+ */
+ public ArrayHistogram(final long lowestDiscernibleValue, final long highestTrackableValue,
+ final int numberOfSignificantValueDigits) {
+ this(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, true);
+ }
+
+ /**
+ * Construct a histogram with the same range settings as a given source histogram,
+ * duplicating the source's start/end timestamps (but NOT its contents)
+ * @param source The source histogram to duplicate
+ */
+ public ArrayHistogram(final AbstractHistogram source) {
+ this(source, true);
+ }
+
+ ArrayHistogram(final AbstractHistogram source, boolean allocateCountsArray) {
+ super(source);
+ if (allocateCountsArray) {
+ counts = new long[countsArrayLength];
+ }
+ wordSizeInBytes = 8;
+ }
+
+ ArrayHistogram(final long lowestDiscernibleValue, final long highestTrackableValue,
+ final int numberOfSignificantValueDigits, boolean allocateCountsArray) {
+ super(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
+ if (allocateCountsArray) {
+ counts = new long[countsArrayLength];
+ }
+        // Hard-coded to 8 bytes per count (long)
+ wordSizeInBytes = 8;
+ }
+
+ /**
+ * Construct a new histogram by decoding it from a ByteBuffer.
+ * @param buffer The buffer to decode from
+ * @param minBarForHighestTrackableValue Force highestTrackableValue to be set at least this high
+ * @return The newly constructed histogram
+ */
+ public static ArrayHistogram decodeFromByteBuffer(final ByteBuffer buffer,
+ final long minBarForHighestTrackableValue) {
+ return decodeFromByteBuffer(buffer, ArrayHistogram.class, minBarForHighestTrackableValue);
+ }
+
+ /**
+ * Construct a new histogram by decoding it from a compressed form in a ByteBuffer.
+ * @param buffer The buffer to decode from
+ * @param minBarForHighestTrackableValue Force highestTrackableValue to be set at least this high
+ * @return The newly constructed histogram
+ * @throws DataFormatException on error parsing/decompressing the buffer
+ */
+ public static ArrayHistogram decodeFromCompressedByteBuffer(final ByteBuffer buffer,
+ final long minBarForHighestTrackableValue)
+ throws DataFormatException {
+ return decodeFromCompressedByteBuffer(buffer, ArrayHistogram.class, minBarForHighestTrackableValue);
+ }
+
+ private void readObject(final ObjectInputStream o)
+ throws IOException, ClassNotFoundException {
+ o.defaultReadObject();
+ }
+
+ /**
+ * Construct a new Histogram by decoding it from a String containing a base64 encoded
+ * compressed histogram representation.
+ *
+ * @param base64CompressedHistogramString A string containing a base64 encoding of a compressed histogram
+     * @return A Histogram decoded from the string
+ * @throws DataFormatException on error parsing/decompressing the input
+ */
+ public static ArrayHistogram fromString(final String base64CompressedHistogramString)
+ throws DataFormatException {
+        // Also parses the base64 string before decompressing
+ return decodeFromCompressedByteBuffer(
+ ByteBuffer.wrap(Base64Helper.parseBase64Binary(base64CompressedHistogramString)),
+ 0);
+ }
+
+ @Override
+ public List<Percentile> percentileList(int percentileTicksPerHalfDistance) {
+ List<Percentile> percentiles = new ArrayList<>();
+ for (HistogramIterationValue percentile : this.percentiles(percentileTicksPerHalfDistance)) {
+ if(percentile.getCountAddedInThisIterationStep() > 0){
+ percentiles.add(new Percentile(percentile.getValueIteratedTo(), percentile.getCountAddedInThisIterationStep(), percentile.getPercentile()));
+ }
+ }
+ return percentiles;
+ }
+
+ @Override
+ public Map<String, Object> describe() {
+ long min = getMinValue();
+ long max = getMaxValue(); // max = this.maxValue;
+ long count = getTotalCount();
+ double mean = getMean();
+ long sum = (long) (mean * count);
+ mean = Math.round(mean * 100.0) / 100.0;
+ long p25 = getValueAtPercentile(25);
+ long p50 = getValueAtPercentile(50);
+ long p75 = getValueAtPercentile(75);
+ long p90 = getValueAtPercentile(90);
+ long p95 = getValueAtPercentile(95);
+ long p99 = getValueAtPercentile(99);
+ Map<String, Object> rst = new LinkedHashMap<>();
+ rst.put("count", count);
+ rst.put("mean", mean);
+ rst.put("sum", sum);
+ rst.put("min", min);
+ rst.put("p25", p25);
+ rst.put("p50", p50);
+ rst.put("p75", p75);
+ rst.put("p90", p90);
+ rst.put("p95", p95);
+ rst.put("p99", p99);
+ rst.put("max", max);
+ return rst;
+ }
+
+ @Override
+ public Histogramer resetHistogram() {
+ if(isAutoResize()){
+ return new ArrayHistogram(this.numberOfSignificantValueDigits);
+ }else{
+ this.reset();
+ return this;
+ }
+ }
+
+ @Override
+ public Histogramer merge(Histogramer histogram) {
+ if(histogram instanceof AbstractHistogram){
+ this.add((AbstractHistogram)histogram);
+ return this;
+ }else if(histogram instanceof DirectMapHistogram){
+ try {
+ ((DirectMapHistogram)histogram).mergeInto(this);
+ return this;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }else{
+ throw new UnsupportedOperationException("unsupported method");
+ }
+ }
+
+ @Override
+ public byte[] toBytes() {
+ ByteBuffer byteBuffer = ByteBuffer.allocate(this.getNeededByteBufferCapacity());
+ this.encodeIntoByteBuffer(byteBuffer);
+ return byteBuffer2Bytes(byteBuffer);
+ }
+
+ public static ArrayHistogram fromBytes(byte[] bytes) {
+ ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
+ return fromByteBuffer(byteBuffer);
+ }
+
+ public static ArrayHistogram fromByteBuffer(ByteBuffer byteBuffer) {
+ int initPosition = byteBuffer.position();
+ int cookie = byteBuffer.getInt(initPosition);
+ if(DirectMapHistogram.getCookieBase(cookie) == DirectMapHistogram.V2CompressedEncodingCookieBase){
+ try {
+ return ArrayHistogram.decodeFromCompressedByteBuffer(byteBuffer, 2);
+ } catch (DataFormatException e) {
+ throw new RuntimeException(e);
+ }
+ }else if(DirectMapHistogram.getCookieBase(cookie) == DirectMapHistogram.V2EncodingCookieBase){
+ return ArrayHistogram.decodeFromByteBuffer(byteBuffer, 2);
+ }
+ throw new UnsupportedOperationException("unsupported method");
+ }
+}
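
ArrayHistogram above is the on-heap implementation: counts live in a long[], and toBytes()/fromBytes() round-trip through the V2 encoding whose cookie base is checked in fromByteBuffer(). A minimal round-trip sketch (variable names are illustrative, and recording is assumed to behave as in upstream HdrHistogram):

    ArrayHistogram recorded = new ArrayHistogram(2);     // auto-resizing, 2 significant digits
    recorded.recordValueWithCount(1_000L, 5);            // five samples of value 1000
    recorded.recordValue(250_000L);

    byte[] wire = recorded.toBytes();                    // uncompressed V2 encoding
    ArrayHistogram decoded = ArrayHistogram.fromBytes(wire);
    decoded.merge(recorded);                             // AbstractHistogram path: doubles every count
    System.out.println(decoded.describe());              // {count=12, mean=..., p50=..., max=...}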
diff --git a/src/main/java/org/HdrHistogram/DirectArrayHistogram.java b/src/main/java/org/HdrHistogram/DirectArrayHistogram.java
new file mode 100644
index 0000000..0b2636f
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/DirectArrayHistogram.java
@@ -0,0 +1,234 @@
+package org.HdrHistogram;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+public class DirectArrayHistogram extends AbstractHistogram implements Histogramer{
+ long totalCount;
+ int normalizingIndexOffset;
+ private ByteBuffer byteBuffer;
+ private int initPosition;
+
+ public DirectArrayHistogram(final long lowestDiscernibleValue, final long highestTrackableValue,
+ final int numberOfSignificantValueDigits, ByteBuffer byteBuffer) {
+ super(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
+ this.byteBuffer = byteBuffer;
+ this.initPosition = byteBuffer.position();
+ wordSizeInBytes = 8;
+ }
+
+    // Used internally by Druid
+ public void resetByteBuffer(ByteBuffer byteBuffer){
+ this.byteBuffer = byteBuffer;
+ this.initPosition = byteBuffer.position();
+ }
+
+ @Override
+ long getCountAtIndex(int index) {
+ int i = normalizeIndex(index, normalizingIndexOffset, countsArrayLength);
+ return byteBuffer.getLong(initPosition + i * 8);
+ }
+
+ @Override
+ long getCountAtNormalizedIndex(int index) {
+ return byteBuffer.getLong(initPosition + index * 8);
+ }
+
+ @Override
+ void incrementCountAtIndex(int index) {
+ int i = normalizeIndex(index, normalizingIndexOffset, countsArrayLength);
+ int pos = initPosition + i * 8;
+ long val = byteBuffer.getLong(pos);
+ byteBuffer.putLong(pos, val + 1);
+ }
+
+ @Override
+ void addToCountAtIndex(int index, long value) {
+ int i = normalizeIndex(index, normalizingIndexOffset, countsArrayLength);
+ int pos = initPosition + i * 8;
+ long val = byteBuffer.getLong(pos);
+ byteBuffer.putLong(pos, val + value);
+ }
+
+ @Override
+ void setCountAtIndex(int index, long value) {
+ int i = normalizeIndex(index, normalizingIndexOffset, countsArrayLength);
+ int pos = initPosition + i * 8;
+ byteBuffer.putLong(pos, value);
+ }
+
+ @Override
+ void setCountAtNormalizedIndex(int index, long value) {
+ int pos = initPosition + index * 8;
+ byteBuffer.putLong(pos, value);
+ }
+
+ @Override
+ int getNormalizingIndexOffset() {
+ return normalizingIndexOffset;
+ }
+
+ @Override
+ void setNormalizingIndexOffset(int normalizingIndexOffset) {
+ if(normalizingIndexOffset == 0){
+ this.normalizingIndexOffset = normalizingIndexOffset;
+ }else{
+ throw new RuntimeException("cant not setNormalizingIndexOffset");
+ }
+ }
+
+ @Override
+ void setIntegerToDoubleValueConversionRatio(double integerToDoubleValueConversionRatio) {
+ nonConcurrentSetIntegerToDoubleValueConversionRatio(integerToDoubleValueConversionRatio);
+ }
+
+ @Override
+ void shiftNormalizingIndexByOffset(int offsetToAdd, boolean lowestHalfBucketPopulated, double newIntegerToDoubleValueConversionRatio) {
+ nonConcurrentNormalizingIndexShift(offsetToAdd, lowestHalfBucketPopulated);
+ }
+
+ @Override
+ void clearCounts() {
+ for (int i = 0; i < countsArrayLength; i++) {
+ byteBuffer.putLong(initPosition + i * 8, 0L);
+ }
+ totalCount = 0;
+ }
+
+ @Override
+ public Histogramer makeCopy() {
+ return miniCopy();
+ }
+
+ @Override
+ public ArrayHistogram copy() {
+ ArrayHistogram copy = new ArrayHistogram(this);
+ copy.add(this);
+ return copy;
+ }
+
+ public ArrayHistogram miniCopy() {
+        long cappedHighest = maxValue < highestTrackableValue
+                ? Math.max(maxValue, lowestDiscernibleValue * 2)
+                : highestTrackableValue;
+        ArrayHistogram copy = new ArrayHistogram(lowestDiscernibleValue, cappedHighest, numberOfSignificantValueDigits);
+ copy.add(this);
+ return copy;
+ }
+
+ @Override
+ public AbstractHistogram copyCorrectedForCoordinatedOmission(long expectedIntervalBetweenValueSamples) {
+ Histogram copy = new Histogram(this);
+ copy.addWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
+ return copy;
+ }
+
+ @Override
+ public long getTotalCount() {
+ return totalCount;
+ }
+
+ @Override
+ void setTotalCount(final long totalCount) {
+ this.totalCount = totalCount;
+ }
+
+ @Override
+ void incrementTotalCount() {
+ totalCount++;
+ }
+
+ @Override
+ void addToTotalCount(long value) {
+ totalCount += value;
+ }
+
+
+ @Override
+ int _getEstimatedFootprintInBytes() {
+ return (512 + (8 * countsArrayLength));
+ }
+
+ @Override
+ void resize(long newHighestTrackableValue) {
+ throw new RuntimeException("cant not resize");
+ }
+
+ public static int getCountsArrayLength(long lowestDiscernibleValue, long highestTrackableValue, int numberOfSignificantValueDigits){
+ Histogram his = new Histogram(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, false);
+ return his.countsArrayLength;
+ }
+
+ public static final int getUpdatableSerializationBytes(long lowestDiscernibleValue, long highestTrackableValue, int numberOfSignificantValueDigits){
+ return getCountsArrayLength(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits) * 8;
+ }
+
+ @Override
+ public List<Percentile> percentileList(int percentileTicksPerHalfDistance) {
+ List<Percentile> percentiles = new ArrayList<>();
+ for (HistogramIterationValue percentile : this.percentiles(percentileTicksPerHalfDistance)) {
+ if(percentile.getCountAddedInThisIterationStep() > 0){
+ percentiles.add(new Percentile(percentile.getValueIteratedTo(), percentile.getCountAddedInThisIterationStep(), percentile.getPercentile()));
+ }
+ }
+ return percentiles;
+ }
+
+ @Override
+ public Map<String, Object> describe() {
+ long min = getMinValue();
+ long max = getMaxValue(); // max = this.maxValue;
+ long count = getTotalCount();
+ double mean = getMean();
+ long sum = (long) (mean * count);
+ mean = Math.round(mean * 100.0) / 100.0;
+ long p25 = getValueAtPercentile(25);
+ long p50 = getValueAtPercentile(50);
+ long p75 = getValueAtPercentile(75);
+ long p90 = getValueAtPercentile(90);
+ long p95 = getValueAtPercentile(95);
+ long p99 = getValueAtPercentile(99);
+ Map<String, Object> rst = new LinkedHashMap<>();
+ rst.put("count", count);
+ rst.put("mean", mean);
+ rst.put("sum", sum);
+ rst.put("min", min);
+ rst.put("p25", p25);
+ rst.put("p50", p50);
+ rst.put("p75", p75);
+ rst.put("p90", p90);
+ rst.put("p95", p95);
+ rst.put("p99", p99);
+ rst.put("max", max);
+ return rst;
+ }
+
+ @Override
+ public Histogramer resetHistogram() {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public Histogramer merge(Histogramer histogram) {
+ if(histogram instanceof AbstractHistogram){
+ this.add((AbstractHistogram)histogram);
+ return this;
+ }else if(histogram instanceof DirectMapHistogram){
+ try {
+ ((DirectMapHistogram)histogram).mergeInto(this);
+ return this;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }else{
+ throw new UnsupportedOperationException("unsupported method");
+ }
+ }
+
+ @Override
+ public byte[] toBytes() {
+ ByteBuffer byteBuffer = ByteBuffer.allocate(this.getNeededByteBufferCapacity());
+ this.encodeIntoByteBuffer(byteBuffer);
+ return byteBuffer2Bytes(byteBuffer);
+ }
+}
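
DirectArrayHistogram stores each bucket as an 8-byte long directly in a caller-supplied ByteBuffer and deliberately refuses to resize, which suits pre-sized off-heap aggregation buffers (e.g. Druid's). A sizing-and-wrapping sketch, with illustrative range parameters:

    long lowest = 1, highest = 3_600_000_000L;
    int digits = 2;
    int needed = DirectArrayHistogram.getUpdatableSerializationBytes(lowest, highest, digits);
    ByteBuffer buffer = ByteBuffer.allocateDirect(needed);   // zero-filled on allocation
    DirectArrayHistogram histogram = new DirectArrayHistogram(lowest, highest, digits, buffer);
    histogram.recordValue(12_345L);   // the bucket count is written straight into the buffer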
diff --git a/src/main/java/org/HdrHistogram/DirectMapHistogram.java b/src/main/java/org/HdrHistogram/DirectMapHistogram.java
new file mode 100644
index 0000000..bc0951d
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/DirectMapHistogram.java
@@ -0,0 +1,492 @@
+package org.HdrHistogram;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.List;
+import java.util.Map;
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+
+import static java.nio.ByteOrder.BIG_ENDIAN;
+
+/**
+ * Maps a byte buffer directly as a read-only Histogram. Used for Druid queries to reduce GC
+ * pressure and computation. The serialized form is a sparse array.
+ */
+public class DirectMapHistogram implements Histogramer{
+ static final int V2maxWordSizeInBytes = 9; // LEB128-64b9B + ZigZag require up to 9 bytes per word
+ static final int V2EncodingCookieBase = 0x1c849303;
+ static final int V2CompressedEncodingCookieBase = 0x1c849304;
+
+ final ByteBuffer byteBuffer;
+ final int initPosition;
+ long totalCount;
+
+ private DirectMapHistogram(ByteBuffer byteBuffer) {
+ int initPosition = byteBuffer.position();
+ this.byteBuffer = byteBuffer;
+ this.initPosition = initPosition;
+ this.totalCount = -1;
+ }
+
+ public static boolean byteBufferCanToDirectMapHistogram(ByteBuffer byteBuffer) {
+ int initPosition = byteBuffer.position();
+ int cookie = byteBuffer.getInt(initPosition);
+ return getCookieBase(cookie) == V2EncodingCookieBase || getCookieBase(cookie) == V2CompressedEncodingCookieBase;
+ }
+
+ public static DirectMapHistogram wrapBytes(byte[] bytes) {
+ ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
+ return wrapByteBuffer(byteBuffer);
+ }
+
+ public static DirectMapHistogram wrapByteBuffer(ByteBuffer byteBuffer) {
+ if(byteBufferCanToDirectMapHistogram(byteBuffer)){
+            DirectMapHistogram histogram = new DirectMapHistogram(byteBuffer);
+            return histogram;
+ }
+ throw new RuntimeException("can not wrapByteBuffer");
+ }
+
+ public void mergeInto(AbstractHistogram histogram) throws Exception{
+ int cookie = byteBuffer.getInt(initPosition);
+ if(getCookieBase(cookie) == V2CompressedEncodingCookieBase){
+ final int lengthOfCompressedContents = byteBuffer.getInt(initPosition + 4);
+ final Inflater decompressor = new Inflater();
+
+ if (byteBuffer.hasArray()) {
+ decompressor.setInput(byteBuffer.array(), initPosition + 8, lengthOfCompressedContents);
+ } else {
+ byte[] compressedContents = new byte[lengthOfCompressedContents];
+ byteBuffer.position(initPosition + 8);
+ try {
+ byteBuffer.get(compressedContents);
+ decompressor.setInput(compressedContents);
+ }finally {
+ byteBuffer.position(initPosition);
+ }
+ }
+ final int headerSize = 40;
+ final ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(BIG_ENDIAN);
+ decompressor.inflate(headerBuffer.array());
+
+ cookie = headerBuffer.getInt();
+ final int payloadLengthInBytes;
+ final int normalizingIndexOffset;
+ final int numberOfSignificantValueDigits;
+ final long lowestTrackableUnitValue;
+ long highestTrackableValue;
+ final double integerToDoubleValueConversionRatio;
+
+ assert getCookieBase(cookie) == V2EncodingCookieBase;
+
+ payloadLengthInBytes = headerBuffer.getInt(4);
+ normalizingIndexOffset = headerBuffer.getInt(8);
+ numberOfSignificantValueDigits = headerBuffer.getInt( 12);
+ lowestTrackableUnitValue = headerBuffer.getLong(16);
+ highestTrackableValue = headerBuffer.getLong(24);
+ integerToDoubleValueConversionRatio = headerBuffer.getDouble(32);
+
+ highestTrackableValue = Math.max(highestTrackableValue, 2);
+
+ final long largestValueWithSingleUnitResolution = 2 * (long) Math.pow(10, numberOfSignificantValueDigits);
+ final int unitMagnitude = (int) (Math.log(lowestTrackableUnitValue)/Math.log(2));
+ final long unitMagnitudeMask = (1 << unitMagnitude) - 1;
+ int subBucketCountMagnitude = (int) Math.ceil(Math.log(largestValueWithSingleUnitResolution)/Math.log(2));
+ final int subBucketHalfCountMagnitude = subBucketCountMagnitude - 1;
+ final int subBucketCount = 1 << subBucketCountMagnitude;
+ final int subBucketHalfCount = subBucketCount / 2;
+ final long subBucketMask = ((long)subBucketCount - 1) << unitMagnitude;
+ if (subBucketCountMagnitude + unitMagnitude > 62) {
+ // subBucketCount entries can't be represented, with unitMagnitude applied, in a positive long.
+ // Technically it still sort of works if their sum is 63: you can represent all but the last number
+ // in the shifted subBucketCount. However, the utility of such a histogram vs ones whose magnitude here
+ // fits in 62 bits is debatable, and it makes it harder to work through the logic.
+ // Sums larger than 64 are totally broken as leadingZeroCountBase would go negative.
+ throw new IllegalArgumentException("Cannot represent numberOfSignificantValueDigits worth of values " +
+ "beyond lowestDiscernibleValue");
+ }
+
+ final int expectedCapacity = payloadLengthInBytes;
+
+ ByteBuffer sourceBuffer = ByteBuffer.allocate(expectedCapacity).order(BIG_ENDIAN);
+ int decompressedByteCount = decompressor.inflate(sourceBuffer.array());
+            decompressor.end(); // must be called explicitly; otherwise rapid repeated use can exhaust native (off-heap) memory
+ if ((payloadLengthInBytes != Integer.MAX_VALUE) && (decompressedByteCount < payloadLengthInBytes)) {
+ throw new IllegalArgumentException("The buffer does not contain the indicated payload amount");
+ }
+ assert decompressedByteCount == expectedCapacity;
+
+ int dstIndex = 0;
+            int endPosition = sourceBuffer.position() + expectedCapacity; // expected position at which reading stops
+ while (sourceBuffer.position() < endPosition) {
+ long count;
+ int zerosCount = 0;
+ // V2 encoding format uses a long encoded in a ZigZag LEB128 format (up to V2maxWordSizeInBytes):
+ count = ZigZagEncoding.getLong(sourceBuffer);
+ if (count < 0) {
+                    long zc = -count; // run length of consecutive zero counts
+ if (zc > Integer.MAX_VALUE) {
+ throw new IllegalArgumentException(
+ "An encoded zero count of > Integer.MAX_VALUE was encountered in the source");
+ }
+ zerosCount = (int) zc;
+ }
+ if (zerosCount > 0) {
+ dstIndex += zerosCount; // No need to set zeros in array. Just skip them.
+ } else {
+                // a single zero outside a run is also encoded explicitly
+ if(count > 0){
+ long value = valueFromIndex(dstIndex, subBucketHalfCountMagnitude, subBucketHalfCount, unitMagnitude);
+ histogram.recordValueWithCount(value, count);
+ }
+ dstIndex++;
+ }
+ }
+
+ }else if(getCookieBase(cookie) == V2EncodingCookieBase){
+ final int payloadLengthInBytes;
+ final int normalizingIndexOffset;
+ final int numberOfSignificantValueDigits;
+ final long lowestTrackableUnitValue;
+ long highestTrackableValue;
+ final double integerToDoubleValueConversionRatio;
+
+ payloadLengthInBytes = byteBuffer.getInt(initPosition + 4);
+ normalizingIndexOffset = byteBuffer.getInt(initPosition + 8);
+ numberOfSignificantValueDigits = byteBuffer.getInt(initPosition + 12);
+ lowestTrackableUnitValue = byteBuffer.getLong(initPosition + 16);
+ highestTrackableValue = byteBuffer.getLong(initPosition + 24);
+ integerToDoubleValueConversionRatio = byteBuffer.getDouble(initPosition + 32);
+
+ highestTrackableValue = Math.max(highestTrackableValue, 2);
+
+ final long largestValueWithSingleUnitResolution = 2 * (long) Math.pow(10, numberOfSignificantValueDigits);
+ final int unitMagnitude = (int) (Math.log(lowestTrackableUnitValue)/Math.log(2));
+ final long unitMagnitudeMask = (1 << unitMagnitude) - 1;
+ int subBucketCountMagnitude = (int) Math.ceil(Math.log(largestValueWithSingleUnitResolution)/Math.log(2));
+ final int subBucketHalfCountMagnitude = subBucketCountMagnitude - 1;
+ final int subBucketCount = 1 << subBucketCountMagnitude;
+ final int subBucketHalfCount = subBucketCount / 2;
+ final long subBucketMask = ((long)subBucketCount - 1) << unitMagnitude;
+ if (subBucketCountMagnitude + unitMagnitude > 62) {
+ // subBucketCount entries can't be represented, with unitMagnitude applied, in a positive long.
+ // Technically it still sort of works if their sum is 63: you can represent all but the last number
+ // in the shifted subBucketCount. However, the utility of such a histogram vs ones whose magnitude here
+ // fits in 62 bits is debatable, and it makes it harder to work through the logic.
+ // Sums larger than 64 are totally broken as leadingZeroCountBase would go negative.
+ throw new IllegalArgumentException("Cannot represent numberOfSignificantValueDigits worth of values " +
+ "beyond lowestDiscernibleValue");
+ }
+
+            final int expectedCapacity = payloadLengthInBytes;
+ if(expectedCapacity > byteBuffer.limit() - 40){
+ throw new IllegalArgumentException("The buffer does not contain the full Histogram payload");
+ }
+ final int position = initPosition + 40;
+ final int lengthInBytes = expectedCapacity;
+ final int wordSizeInBytes = V2maxWordSizeInBytes;
+ // fillCountsArrayFromSourceBuffer
+
+ ByteBuffer sourceBuffer = byteBuffer.duplicate();
+ sourceBuffer.position(position);
+            final long maxAllowableCountInHistogram = Long.MAX_VALUE;
+ int dstIndex = 0;
+            int endPosition = sourceBuffer.position() + lengthInBytes; // expected position at which reading stops
+ while (sourceBuffer.position() < endPosition) {
+ long count;
+ int zerosCount = 0;
+ // V2 encoding format uses a long encoded in a ZigZag LEB128 format (up to V2maxWordSizeInBytes):
+ count = ZigZagEncoding.getLong(sourceBuffer);
+ if (count < 0) {
+                    long zc = -count; // run length of consecutive zero counts
+ if (zc > Integer.MAX_VALUE) {
+ throw new IllegalArgumentException(
+ "An encoded zero count of > Integer.MAX_VALUE was encountered in the source");
+ }
+ zerosCount = (int) zc;
+ }
+ if (zerosCount > 0) {
+ dstIndex += zerosCount; // No need to set zeros in array. Just skip them.
+ } else {
+                    // a single zero outside a run is also encoded explicitly
+ if(count > 0){
+ long value = valueFromIndex(dstIndex, subBucketHalfCountMagnitude, subBucketHalfCount, unitMagnitude);
+ histogram.recordValueWithCount(value, count);
+ }
+ dstIndex++;
+ }
+ }
+ }else{
+ throw new RuntimeException("can not wrapByteBuffer");
+ }
+ }
+
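+    // Worked example (illustrative): with 2 significant digits, largestValueWithSingleUnitResolution = 200,
+    // so subBucketCount = 256, subBucketHalfCount = 128 and subBucketHalfCountMagnitude = 7. A flat
+    // index of 300 then yields bucketIndex = (300 >> 7) - 1 = 1 and subBucketIndex = (300 & 127) + 128 = 172,
+    // hence value = 172 << (1 + unitMagnitude), i.e. 344 when unitMagnitude = 0.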
+ final long valueFromIndex(final int index, int subBucketHalfCountMagnitude, int subBucketHalfCount, int unitMagnitude) {
+ int bucketIndex = (index >> subBucketHalfCountMagnitude) - 1;
+ int subBucketIndex = (index & (subBucketHalfCount - 1)) + subBucketHalfCount;
+ if (bucketIndex < 0) {
+ subBucketIndex -= subBucketHalfCount;
+ bucketIndex = 0;
+ }
+ return valueFromIndex(bucketIndex, subBucketIndex, unitMagnitude);
+ }
+
+ private long valueFromIndex(final int bucketIndex, final int subBucketIndex, int unitMagnitude) {
+ return ((long) subBucketIndex) << (bucketIndex + unitMagnitude);
+ }
+
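+    // Bits 4..7 of the cookie carry the word size; masking them out recovers the base cookie value.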
+ static int getCookieBase(final int cookie) {
+ return (cookie & ~0xf0);
+ }
+
+ @Override
+ public long getTotalCount() {
+ if(totalCount >= 0){
+ return totalCount;
+ }
+ try {
+ totalCount = 0;
+ int cookie = byteBuffer.getInt(initPosition);
+ if(getCookieBase(cookie) == V2CompressedEncodingCookieBase){
+ final int lengthOfCompressedContents = byteBuffer.getInt(initPosition + 4);
+ final Inflater decompressor = new Inflater();
+
+ if (byteBuffer.hasArray()) {
+ decompressor.setInput(byteBuffer.array(), initPosition + 8, lengthOfCompressedContents);
+ } else {
+ byte[] compressedContents = new byte[lengthOfCompressedContents];
+ byteBuffer.position(initPosition + 8);
+ try {
+ byteBuffer.get(compressedContents);
+ decompressor.setInput(compressedContents);
+ }finally {
+ byteBuffer.position(initPosition);
+ }
+ }
+ final int headerSize = 40;
+ final ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(BIG_ENDIAN);
+ decompressor.inflate(headerBuffer.array());
+
+ cookie = headerBuffer.getInt();
+ final int payloadLengthInBytes;
+ final int normalizingIndexOffset;
+ final int numberOfSignificantValueDigits;
+ final long lowestTrackableUnitValue;
+ long highestTrackableValue;
+ final double integerToDoubleValueConversionRatio;
+
+ assert getCookieBase(cookie) == V2EncodingCookieBase;
+
+ payloadLengthInBytes = headerBuffer.getInt(4);
+ normalizingIndexOffset = headerBuffer.getInt(8);
+ numberOfSignificantValueDigits = headerBuffer.getInt( 12);
+ lowestTrackableUnitValue = headerBuffer.getLong(16);
+ highestTrackableValue = headerBuffer.getLong(24);
+ integerToDoubleValueConversionRatio = headerBuffer.getDouble(32);
+
+ highestTrackableValue = Math.max(highestTrackableValue, 2);
+
+ final long largestValueWithSingleUnitResolution = 2 * (long) Math.pow(10, numberOfSignificantValueDigits);
+ final int unitMagnitude = (int) (Math.log(lowestTrackableUnitValue)/Math.log(2));
+ final long unitMagnitudeMask = (1 << unitMagnitude) - 1;
+ int subBucketCountMagnitude = (int) Math.ceil(Math.log(largestValueWithSingleUnitResolution)/Math.log(2));
+ final int subBucketHalfCountMagnitude = subBucketCountMagnitude - 1;
+ final int subBucketCount = 1 << subBucketCountMagnitude;
+ final int subBucketHalfCount = subBucketCount / 2;
+ final long subBucketMask = ((long)subBucketCount - 1) << unitMagnitude;
+ if (subBucketCountMagnitude + unitMagnitude > 62) {
+ // subBucketCount entries can't be represented, with unitMagnitude applied, in a positive long.
+ // Technically it still sort of works if their sum is 63: you can represent all but the last number
+ // in the shifted subBucketCount. However, the utility of such a histogram vs ones whose magnitude here
+ // fits in 62 bits is debatable, and it makes it harder to work through the logic.
+ // Sums larger than 64 are totally broken as leadingZeroCountBase would go negative.
+ throw new IllegalArgumentException("Cannot represent numberOfSignificantValueDigits worth of values " +
+ "beyond lowestDiscernibleValue");
+ }
+
+ final int expectedCapacity = payloadLengthInBytes;
+
+ ByteBuffer sourceBuffer = ByteBuffer.allocate(expectedCapacity).order(BIG_ENDIAN);
+ int decompressedByteCount = decompressor.inflate(sourceBuffer.array());
+                decompressor.end(); // must be called explicitly; otherwise rapid repeated use can exhaust native (off-heap) memory
+ if ((payloadLengthInBytes != Integer.MAX_VALUE) && (decompressedByteCount < payloadLengthInBytes)) {
+ throw new IllegalArgumentException("The buffer does not contain the indicated payload amount");
+ }
+ assert decompressedByteCount == expectedCapacity;
+
+ int dstIndex = 0;
+                int endPosition = sourceBuffer.position() + expectedCapacity; // expected position at which reading stops
+ while (sourceBuffer.position() < endPosition) {
+ long count;
+ int zerosCount = 0;
+ // V2 encoding format uses a long encoded in a ZigZag LEB128 format (up to V2maxWordSizeInBytes):
+ count = ZigZagEncoding.getLong(sourceBuffer);
+ if (count < 0) {
+                        long zc = -count; // run length of consecutive zero counts
+ if (zc > Integer.MAX_VALUE) {
+ throw new IllegalArgumentException(
+ "An encoded zero count of > Integer.MAX_VALUE was encountered in the source");
+ }
+ zerosCount = (int) zc;
+ }
+ if (zerosCount > 0) {
+ dstIndex += zerosCount; // No need to set zeros in array. Just skip them.
+ } else {
+                    // a single zero outside a run is also encoded explicitly
+ if(count > 0){
+ //long value = valueFromIndex(dstIndex, subBucketHalfCountMagnitude, subBucketHalfCount, unitMagnitude);
+ //histogram.recordValueWithCount(value, count);
+ totalCount += count;
+ }
+ dstIndex++;
+ }
+ }
+ return totalCount;
+ }else if(getCookieBase(cookie) == V2EncodingCookieBase){
+ final int payloadLengthInBytes;
+ final int normalizingIndexOffset;
+ final int numberOfSignificantValueDigits;
+ final long lowestTrackableUnitValue;
+ long highestTrackableValue;
+ final double integerToDoubleValueConversionRatio;
+
+ payloadLengthInBytes = byteBuffer.getInt(initPosition + 4);
+ normalizingIndexOffset = byteBuffer.getInt(initPosition + 8);
+ numberOfSignificantValueDigits = byteBuffer.getInt(initPosition + 12);
+ lowestTrackableUnitValue = byteBuffer.getLong(initPosition + 16);
+ highestTrackableValue = byteBuffer.getLong(initPosition + 24);
+ integerToDoubleValueConversionRatio = byteBuffer.getDouble(initPosition + 32);
+
+ highestTrackableValue = Math.max(highestTrackableValue, 2);
+
+ final long largestValueWithSingleUnitResolution = 2 * (long) Math.pow(10, numberOfSignificantValueDigits);
+ final int unitMagnitude = (int) (Math.log(lowestTrackableUnitValue)/Math.log(2));
+ final long unitMagnitudeMask = (1 << unitMagnitude) - 1;
+ int subBucketCountMagnitude = (int) Math.ceil(Math.log(largestValueWithSingleUnitResolution)/Math.log(2));
+ final int subBucketHalfCountMagnitude = subBucketCountMagnitude - 1;
+ final int subBucketCount = 1 << subBucketCountMagnitude;
+ final int subBucketHalfCount = subBucketCount / 2;
+ final long subBucketMask = ((long)subBucketCount - 1) << unitMagnitude;
+ if (subBucketCountMagnitude + unitMagnitude > 62) {
+ // subBucketCount entries can't be represented, with unitMagnitude applied, in a positive long.
+ // Technically it still sort of works if their sum is 63: you can represent all but the last number
+ // in the shifted subBucketCount. However, the utility of such a histogram vs ones whose magnitude here
+ // fits in 62 bits is debatable, and it makes it harder to work through the logic.
+ // Sums larger than 64 are totally broken as leadingZeroCountBase would go negative.
+ throw new IllegalArgumentException("Cannot represent numberOfSignificantValueDigits worth of values " +
+ "beyond lowestDiscernibleValue");
+ }
+
+                final int expectedCapacity = payloadLengthInBytes;
+ if(expectedCapacity > byteBuffer.limit() - 40){
+ throw new IllegalArgumentException("The buffer does not contain the full Histogram payload");
+ }
+ final int position = initPosition + 40;
+ final int lengthInBytes = expectedCapacity;
+ final int wordSizeInBytes = V2maxWordSizeInBytes;
+ // fillCountsArrayFromSourceBuffer
+
+ ByteBuffer sourceBuffer = byteBuffer.duplicate();
+ sourceBuffer.position(position);
+                final long maxAllowableCountInHistogram = Long.MAX_VALUE;
+ int dstIndex = 0;
+                int endPosition = sourceBuffer.position() + lengthInBytes; // expected position at which reading stops
+ while (sourceBuffer.position() < endPosition) {
+ long count;
+ int zerosCount = 0;
+ // V2 encoding format uses a long encoded in a ZigZag LEB128 format (up to V2maxWordSizeInBytes):
+ count = ZigZagEncoding.getLong(sourceBuffer);
+ if (count < 0) {
+                        long zc = -count; // run length of consecutive zero counts
+ if (zc > Integer.MAX_VALUE) {
+ throw new IllegalArgumentException(
+ "An encoded zero count of > Integer.MAX_VALUE was encountered in the source");
+ }
+ zerosCount = (int) zc;
+ }
+ if (zerosCount > 0) {
+ dstIndex += zerosCount; // No need to set zeros in array. Just skip them.
+ } else {
+                    // a single zero outside a run is also encoded explicitly
+ if(count > 0){
+ //long value = valueFromIndex(dstIndex, subBucketHalfCountMagnitude, subBucketHalfCount, unitMagnitude);
+ //histogram.recordValueWithCount(value, count);
+ totalCount += count;
+ }
+ dstIndex++;
+ }
+ }
+ return totalCount;
+ }else{
+ throw new UnsupportedOperationException("unsupported method");
+ }
+ } catch (DataFormatException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void recordValue(long value) throws RuntimeException {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public void recordValueWithCount(long value, long count) throws RuntimeException {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public long getValueAtPercentile(double percentile) {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public List<Percentile> percentileList(int percentileTicksPerHalfDistance) {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public Map<String, Object> describe() {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public Histogramer resetHistogram() {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public Histogramer merge(Histogramer histogram) {
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public Histogramer makeCopy() throws RuntimeException{
+ int cookie = byteBuffer.getInt(initPosition);
+ if(getCookieBase(cookie) == V2CompressedEncodingCookieBase){
+ try {
+ return ArrayHistogram.decodeFromCompressedByteBuffer(byteBuffer, 2);
+ } catch (DataFormatException e) {
+ throw new RuntimeException(e);
+ }
+ }else if(getCookieBase(cookie) == V2EncodingCookieBase){
+ return ArrayHistogram.decodeFromByteBuffer(byteBuffer, 2);
+ }
+ throw new UnsupportedOperationException("unsupported method");
+ }
+
+ @Override
+ public byte[] toBytes() {
+ int size = byteBuffer.limit() - initPosition;
+ byte[] bytes = new byte[size];
+ assert byteBuffer.order() == ByteOrder.BIG_ENDIAN;
+ int oldPosition = byteBuffer.position();
+ byteBuffer.position(initPosition);
+ byteBuffer.get(bytes, 0, size);
+ byteBuffer.position(oldPosition);
+ return bytes;
+ }
+}
+
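
DirectMapHistogram never materializes a counts array: the V2 payload is a ZigZag/LEB128 stream in which a negative value encodes a run of that many zero buckets, so getTotalCount() and mergeInto() are single streaming passes over the bytes. A read-side sketch, assuming serialized holds V2-encoded histogram bytes (e.g. produced by ArrayHistogram#toBytes):

    DirectMapHistogram readOnly = DirectMapHistogram.wrapBytes(serialized); // zero-copy wrap
    long total = readOnly.getTotalCount();       // streams the payload once, caches the result

    ArrayHistogram target = new ArrayHistogram(2);
    try {
        readOnly.mergeInto(target);              // replays each non-zero (value, count) pair
    } catch (Exception e) {
        throw new RuntimeException(e);
    }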
diff --git a/src/main/java/org/HdrHistogram/HistogramSketch.java b/src/main/java/org/HdrHistogram/HistogramSketch.java
new file mode 100644
index 0000000..b527e75
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/HistogramSketch.java
@@ -0,0 +1,90 @@
+package org.HdrHistogram;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+
+public class HistogramSketch {
+ public Histogramer hisImpl = null;
+
+ public HistogramSketch(final int numberOfSignificantValueDigits){
+ hisImpl = new ArrayHistogram(numberOfSignificantValueDigits);
+ }
+
+ public HistogramSketch(final long lowestDiscernibleValue, final long highestTrackableValue,
+ final int numberOfSignificantValueDigits, final boolean autoResize){
+ ArrayHistogram histogram = new ArrayHistogram(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
+ histogram.setAutoResize(autoResize);
+ hisImpl = histogram;
+ }
+
+ public HistogramSketch(final Histogramer that) {
+ hisImpl = that;
+ }
+
+ /**
+ * Copy constructor used by copy().
+ */
+ HistogramSketch(final HistogramSketch that) {
+ hisImpl = that.hisImpl.makeCopy();
+ }
+
+ /**
+     * Copies hisImpl into an on-heap instance.
+ */
+ public HistogramSketch copy() {
+ return new HistogramSketch(this);
+ }
+
+ public void reset() {
+ hisImpl = hisImpl.resetHistogram();
+ }
+
+ public long getTotalCount(){
+ return hisImpl.getTotalCount();
+ }
+
+ public void recordValue(long value){
+ hisImpl.recordValue(value);
+ }
+
+ public void recordValueWithCount(long value, long count){
+ hisImpl.recordValueWithCount(value, count);
+ }
+
+ public long getValueAtPercentile(double percentile){
+ return hisImpl.getValueAtPercentile(percentile);
+ }
+
+ public List<Percentile> percentileList(int percentileTicksPerHalfDistance){
+ return hisImpl.percentileList(percentileTicksPerHalfDistance);
+ }
+
+ public Map<String, Object> describe(){
+ return hisImpl.describe();
+ }
+
+ public static final int getUpdatableSerializationBytes(long lowestDiscernibleValue, long highestTrackableValue, int numberOfSignificantValueDigits){
+ return DirectArrayHistogram.getUpdatableSerializationBytes(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits);
+ }
+
+ public byte[] toBytes() {
+ return hisImpl.toBytes();
+ }
+
+ public static HistogramSketch fromBytes(byte[] bytes) {
+ return new HistogramSketch(ArrayHistogram.fromBytes(bytes));
+ }
+
+ public static HistogramSketch fromByteBuffer(ByteBuffer byteBuffer) {
+ return new HistogramSketch(ArrayHistogram.fromByteBuffer(byteBuffer));
+ }
+
+ public static HistogramSketch wrapBytes(byte[] bytes) {
+ return new HistogramSketch(DirectMapHistogram.wrapBytes(bytes));
+ }
+
+ public static HistogramSketch wrapByteBuffer(ByteBuffer byteBuffer) {
+ return new HistogramSketch(DirectMapHistogram.wrapByteBuffer(byteBuffer));
+ }
+}
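
HistogramSketch is a thin facade over whichever Histogramer backend is in play. A sketch contrasting its two deserialization paths (the recorded value is illustrative):

    HistogramSketch sketch = new HistogramSketch(3);    // on-heap, auto-resizing ArrayHistogram
    sketch.recordValue(1_500L);
    byte[] bytes = sketch.toBytes();

    HistogramSketch decoded = HistogramSketch.fromBytes(bytes); // eager decode, mutable
    HistogramSketch wrapped = HistogramSketch.wrapBytes(bytes); // zero-copy, read-only DirectMapHistogram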
diff --git a/src/main/java/org/HdrHistogram/HistogramUnion.java b/src/main/java/org/HdrHistogram/HistogramUnion.java
new file mode 100644
index 0000000..8d5e085
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/HistogramUnion.java
@@ -0,0 +1,48 @@
+package org.HdrHistogram;
+
+import java.nio.ByteBuffer;
+
+public class HistogramUnion {
+ public HistogramSketch impl;
+
+ public HistogramUnion(final int numberOfSignificantValueDigits){
+ impl = new HistogramSketch(numberOfSignificantValueDigits);
+ }
+
+ public HistogramUnion(final long lowestDiscernibleValue, final long highestTrackableValue,
+ final int numberOfSignificantValueDigits, final boolean autoResize){
+ impl = new HistogramSketch(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, autoResize);
+ }
+
+ public HistogramUnion(final long lowestDiscernibleValue, final long highestTrackableValue,
+ final int numberOfSignificantValueDigits, ByteBuffer byteBuffer) {
+ impl = new HistogramSketch(new DirectArrayHistogram(lowestDiscernibleValue, highestTrackableValue, numberOfSignificantValueDigits, byteBuffer));
+ }
+
+ public HistogramUnion(HistogramSketch his) {
+ impl = his;
+ }
+
+    // Used internally by Druid
+ public void resetByteBuffer(ByteBuffer byteBuffer){
+ ((DirectArrayHistogram)impl.hisImpl).resetByteBuffer(byteBuffer);
+ }
+
+ public void reset() {
+ impl.reset();
+ }
+
+ public HistogramSketch getResult() {
+ return impl;
+ }
+
+ public void update(final HistogramSketch his) {
+ if(his != null){
+ impl.hisImpl = unionImpl(his, impl);
+ }
+ }
+
+ private static Histogramer unionImpl(HistogramSketch source, HistogramSketch dest) {
+ return dest.hisImpl.merge(source.hisImpl);
+ }
+}
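
HistogramUnion accumulates sketches by delegating to Histogramer#merge, so wrapped read-only sketches and on-heap sketches can be mixed freely. A merge sketch, where bytesA and bytesB are assumed to hold serialized histograms:

    HistogramUnion union = new HistogramUnion(3);
    union.update(HistogramSketch.wrapBytes(bytesA));  // merged via DirectMapHistogram#mergeInto
    union.update(HistogramSketch.fromBytes(bytesB));  // merged via AbstractHistogram#add
    HistogramSketch merged = union.getResult();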
diff --git a/src/main/java/org/HdrHistogram/Histogramer.java b/src/main/java/org/HdrHistogram/Histogramer.java
new file mode 100644
index 0000000..4901b1d
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/Histogramer.java
@@ -0,0 +1,37 @@
+package org.HdrHistogram;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+
+public interface Histogramer {
+ long getTotalCount();
+
+ void recordValue(long value) throws RuntimeException;
+
+ void recordValueWithCount(long value, long count) throws RuntimeException;
+
+ long getValueAtPercentile(double percentile);
+
+ List<Percentile> percentileList(int percentileTicksPerHalfDistance);
+
+ Map<String, Object> describe();
+
+ Histogramer resetHistogram();
+
+ Histogramer merge(Histogramer histogram);
+
+    // Copies into an on-heap ArrayHistogram instance
+ Histogramer makeCopy();
+
+ byte[] toBytes();
+
+ default byte[] byteBuffer2Bytes(ByteBuffer byteBuffer){
+        // the buffer must be fully written before this call; flip() switches it to read mode
+ byteBuffer.flip();
+ int len = byteBuffer.limit() - byteBuffer.position();
+ byte[] bytes = new byte[len];
+ byteBuffer.get(bytes);
+ return bytes;
+ }
+}
diff --git a/src/main/java/org/HdrHistogram/Percentile.java b/src/main/java/org/HdrHistogram/Percentile.java
new file mode 100644
index 0000000..ad70ca5
--- /dev/null
+++ b/src/main/java/org/HdrHistogram/Percentile.java
@@ -0,0 +1,50 @@
+package org.HdrHistogram;
+
+public class Percentile {
+ public long value;
+ public long count;
+ public double percentile;
+
+ public Percentile() {
+
+ }
+
+ public Percentile(long value, long count, double percentile) {
+ this.value = value;
+ this.count = count;
+ this.percentile = percentile;
+ }
+
+ public long getValue() {
+ return value;
+ }
+
+ public void setValue(long value) {
+ this.value = value;
+ }
+
+ public long getCount() {
+ return count;
+ }
+
+ public void setCount(long count) {
+ this.count = count;
+ }
+
+ public double getPercentile() {
+ return percentile;
+ }
+
+ public void setPercentile(double percentile) {
+ this.percentile = percentile;
+ }
+
+ @Override
+ public String toString() {
+ return "Percentile{" +
+ "value=" + value +
+ ", count=" + count +
+ ", percentile=" + percentile +
+ '}';
+ }
+}