
Commit ac8f22a

Remove the dependency on fastutil, use hppc instead.
1 parent abc5366 · commit ac8f22a
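
The bulk of the change is mechanical: fastutil's Long2LongOpenHashMap becomes HPPC's LongLongOpenHashMap, and keySet() loops become cursor loops, since HPPC maps are iterated through cursor objects rather than a boxed key-set view. A minimal standalone sketch of the HPPC idiom the patch adopts (the class name and sample data below are hypothetical, not from the commit):

```java
import com.carrotsearch.hppc.LongLongOpenHashMap;
import com.carrotsearch.hppc.cursors.LongLongCursor;

// Hypothetical demo class, not part of the commit; it only exercises
// the HPPC calls that QDigestState uses after this change.
public class CursorIterationSketch {
    public static void main(String[] args) {
        // Same constructor shape as in the patch: initial size and load factor.
        LongLongOpenHashMap node2count = new LongLongOpenHashMap(16, 0.5f);
        node2count.put(1L, 10L);
        node2count.put(2L, 20L);

        // fastutil style (removed): for (long k : map.keySet()) { ... }
        // HPPC style (added): each cursor carries the key directly.
        long total = 0;
        for (LongLongCursor cursor : node2count) {
            final long k = cursor.key;
            total += node2count.get(k); // or simply cursor.value
        }
        System.out.println(total); // prints 30
    }
}
```

Cursors also expose the value directly as cursor.value, so the map lookup inside the loop could often be dropped; the patch keeps the explicit get(k) calls, which stays closer to the original code.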

File tree

2 files changed: +29 additions, -26 deletions


pom.xml

Lines changed: 0 additions & 6 deletions

@@ -289,12 +289,6 @@
             <optional>true</optional>
         </dependency>
         -->
-
-        <dependency>
-            <groupId>it.unimi.dsi</groupId>
-            <artifactId>fastutil</artifactId>
-            <version>6.5.12</version>
-        </dependency>
     </dependencies>
 
     <build>
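
This hunk only deletes the fastutil entry; no HPPC dependency is added in this commit, so hppc was presumably already available on the classpath. For reference, a pom that needed HPPC directly would declare coordinates like the following sketch (the version shown is illustrative, not taken from this commit):

```xml
<!-- Hypothetical dependency block; the version is illustrative only. -->
<dependency>
    <groupId>com.carrotsearch</groupId>
    <artifactId>hppc</artifactId>
    <version>0.5.2</version>
</dependency>
```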

src/main/java/org/elasticsearch/search/aggregations/metrics/percentile/qdigest/QDigestState.java

Lines changed: 29 additions & 20 deletions

@@ -1,8 +1,8 @@
 package org.elasticsearch.search.aggregations.metrics.percentile.qdigest;
 
-import it.unimi.dsi.fastutil.Hash;
-import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;
-import it.unimi.dsi.fastutil.longs.LongArrayFIFOQueue;
+import com.carrotsearch.hppc.LongArrayDeque;
+import com.carrotsearch.hppc.LongLongOpenHashMap;
+import com.carrotsearch.hppc.cursors.LongLongCursor;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -74,13 +74,13 @@ public int compare(long[] ra, long[] rb) {
         }
     };
 
-    private static final int MAP_INITIAL_SIZE = Hash.DEFAULT_INITIAL_SIZE;
-    private static final float MAP_LOAD_FACTOR = Hash.VERY_FAST_LOAD_FACTOR;
+    private static final int MAP_INITIAL_SIZE = 16;
+    private static final float MAP_LOAD_FACTOR = 0.5f;
 
     private long size;
     private long capacity = 1;
     private double compressionFactor;
-    private Long2LongOpenHashMap node2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
+    private LongLongOpenHashMap node2count = new LongLongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
 
     public QDigestState(double compressionFactor) {
         this.compressionFactor = compressionFactor;
@@ -168,15 +168,17 @@ public static QDigestState unionOf(QDigestState a, QDigestState b) {
         QDigestState res = new QDigestState(a.compressionFactor);
         res.capacity = a.capacity;
         res.size = a.size + b.size;
-        for (long k : a.node2count.keySet()) {
+        for (LongLongCursor cursor : a.node2count) {
+            final long k = cursor.key;
             res.node2count.put(k, a.node2count.get(k));
         }
 
         if (b.capacity > res.capacity) {
             res.rebuildToCapacity(b.capacity);
         }
 
-        for (long k : b.node2count.keySet()) {
+        for (LongLongCursor cursor : b.node2count) {
+            final long k = cursor.key;
             res.node2count.put(k, b.get(k) + res.get(k));
         }
 
@@ -186,7 +188,7 @@ public static QDigestState unionOf(QDigestState a, QDigestState b) {
     }
 
     private void rebuildToCapacity(long newCapacity) {
-        Long2LongOpenHashMap newNode2count = new Long2LongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
+        LongLongOpenHashMap newNode2count = new LongLongOpenHashMap(MAP_INITIAL_SIZE, MAP_LOAD_FACTOR);
         // rebuild to newLogCapacity.
         // This means that our current tree becomes a leftmost subtree
         // of the new tree.
@@ -198,7 +200,12 @@ private void rebuildToCapacity(long newCapacity) {
         // This is easy to see if you draw it on paper.
         // Process the keys by "layers" in the original tree.
         long scaleR = newCapacity / capacity - 1;
-        Long[] keys = node2count.keySet().toArray(new Long[node2count.size()]);
+        final long[] keys = new long[node2count.size()];
+        int i = 0;
+        for (LongLongCursor cursor : node2count) {
+            final long k = cursor.key;
+            keys[i++] = k;
+        }
         Arrays.sort(keys);
         long scaleL = 1;
         for (long k : keys) {
@@ -214,8 +221,8 @@ private void rebuildToCapacity(long newCapacity) {
 
     private void compressFully() {
         // Restore property 2 at each node.
-        Long[] allNodes = node2count.keySet().toArray(new Long[node2count.size()]);
-        for (long node : allNodes) {
+        for (LongLongCursor cursor : node2count) {
+            final long node = cursor.key;
             compressDownward(node);
         }
     }
@@ -257,10 +264,10 @@ private void compressUpward(long node) {
     private void compressDownward(long seedNode) {
         double threshold = Math.floor(size / compressionFactor);
         // P2 check same as above but shorter and slower (and invoked rarely)
-        LongArrayFIFOQueue q = new LongArrayFIFOQueue();
-        q.enqueue(seedNode);
+        LongArrayDeque q = new LongArrayDeque();
+        q.addLast(seedNode);
         while (!q.isEmpty()) {
-            long node = q.dequeueLong();
+            long node = q.removeFirst();
             long atNode = get(node);
             long atSibling = get(sibling(node));
             if (atNode == 0 && atSibling == 0) {
@@ -275,8 +282,8 @@ private void compressDownward(long seedNode) {
             node2count.remove(sibling(node));
             // Now P2 could have vanished at the node's and sibling's subtrees since they decreased.
             if (!isLeaf(node)) {
-                q.enqueue(leftChild(node));
-                q.enqueue(leftChild(sibling(node)));
+                q.addLast(leftChild(node));
+                q.addLast(leftChild(sibling(node)));
             }
         }
     }
@@ -303,7 +310,8 @@ public boolean isEmpty() {
 
     public List<long[]> toAscRanges() {
         List<long[]> ranges = new ArrayList<long[]>();
-        for (long key : node2count.keySet()) {
+        for (LongLongCursor cursor : node2count) {
+            final long key = cursor.key;
             ranges.add(new long[]{rangeLeft(key), rangeRight(key), node2count.get(key)});
         }
 
@@ -323,7 +331,8 @@ public static void write(QDigestState state, StreamOutput out) throws IOException {
         out.writeLong(state.size);
         out.writeLong(state.capacity);
         out.writeInt(state.node2count.size());
-        for (long k : state.node2count.keySet()) {
+        for (LongLongCursor cursor : state.node2count) {
+            final long k = cursor.key;
             out.writeVLong(k);
             out.writeVLong(state.node2count.get(k));
         }
@@ -334,7 +343,7 @@ public static QDigestState read(StreamInput in) throws IOException {
         state.size = in.readLong();
         state.capacity = in.readLong();
         int count = in.readInt();
-        state.node2count = new Long2LongOpenHashMap(count, MAP_LOAD_FACTOR);
+        state.node2count = new LongLongOpenHashMap(count, MAP_LOAD_FACTOR);
         for (int i = 0; i < count; ++i) {
             long k = in.readVLong();
             long n = in.readVLong();
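
The queue migration in compressDownward follows the same mechanical pattern: fastutil's LongArrayFIFOQueue (enqueue/dequeueLong) maps onto HPPC's LongArrayDeque used FIFO-style with addLast/removeFirst. A minimal sketch of that breadth-first walk, with the digest-specific helpers (leftChild, sibling, isLeaf) replaced by a hypothetical depth cap so the example is self-contained:

```java
import com.carrotsearch.hppc.LongArrayDeque;

// Hypothetical demo class; only the deque calls mirror the commit.
public class DequeFifoSketch {
    public static void main(String[] args) {
        LongArrayDeque q = new LongArrayDeque();
        q.addLast(1L); // seed, as compressDownward seeds with seedNode
        while (!q.isEmpty()) {
            long node = q.removeFirst(); // FIFO removal, replaces dequeueLong()
            System.out.println("visiting node " + node);
            // QDigestState enqueues leftChild(node) and leftChild(sibling(node))
            // while node is not a leaf; a depth cap stands in for isLeaf here.
            if (node < 4) {
                q.addLast(2 * node);
                q.addLast(2 * node + 1);
            }
        }
    }
}
```

The sketch relies only on calls that appear verbatim in the diff (addLast, removeFirst, isEmpty), so it should track the FIFO semantics of the replaced LongArrayFIFOQueue.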
