Skip to content

Commit d0cb198

Browse files
Marcelo Vanzin authored and squito committed
[SPARK-23103][CORE] Ensure correct sort order for negative values in LevelDB.
The code was sorting "0" as "less than" negative values, which is a little wrong. Fix is simple, most of the changes are the added test and related cleanup. Author: Marcelo Vanzin <[email protected]> Closes #20284 from vanzin/SPARK-23103. (cherry picked from commit aa3a127) Signed-off-by: Imran Rashid <[email protected]>
1 parent 4b79514 commit d0cb198

File tree

4 files changed

+52
-42
lines changed

4 files changed

+52
-42
lines changed

common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -493,7 +493,7 @@ byte[] toKey(Object value, byte prefix) {
493493
byte[] key = new byte[bytes * 2 + 2];
494494
long longValue = ((Number) value).longValue();
495495
key[0] = prefix;
496-
key[1] = longValue > 0 ? POSITIVE_MARKER : NEGATIVE_MARKER;
496+
key[1] = longValue >= 0 ? POSITIVE_MARKER : NEGATIVE_MARKER;
497497

498498
for (int i = 0; i < key.length - 2; i++) {
499499
int masked = (int) ((longValue >>> (4 * i)) & 0xF);

common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,9 @@ default BaseComparator reverse() {
7373
private static final BaseComparator NATURAL_ORDER = (t1, t2) -> t1.key.compareTo(t2.key);
7474
private static final BaseComparator REF_INDEX_ORDER = (t1, t2) -> t1.id.compareTo(t2.id);
7575
private static final BaseComparator COPY_INDEX_ORDER = (t1, t2) -> t1.name.compareTo(t2.name);
76-
private static final BaseComparator NUMERIC_INDEX_ORDER = (t1, t2) -> t1.num - t2.num;
76+
private static final BaseComparator NUMERIC_INDEX_ORDER = (t1, t2) -> {
77+
return Integer.valueOf(t1.num).compareTo(t2.num);
78+
};
7779
private static final BaseComparator CHILD_INDEX_ORDER = (t1, t2) -> t1.child.compareTo(t2.child);
7880

7981
/**
@@ -112,7 +114,8 @@ public void setup() throws Exception {
112114
t.key = "key" + i;
113115
t.id = "id" + i;
114116
t.name = "name" + RND.nextInt(MAX_ENTRIES);
115-
t.num = RND.nextInt(MAX_ENTRIES);
117+
// Force one item to have an integer value of zero to test the fix for SPARK-23103.
118+
t.num = (i != 0) ? (int) RND.nextLong() : 0;
116119
t.child = "child" + (i % MIN_ENTRIES);
117120
allEntries.add(t);
118121
}

common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBSuite.java

Lines changed: 40 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@
2121
import java.util.Arrays;
2222
import java.util.List;
2323
import java.util.NoSuchElementException;
24+
import java.util.stream.Collectors;
25+
import java.util.stream.StreamSupport;
2426

2527
import org.apache.commons.io.FileUtils;
2628
import org.iq80.leveldb.DBIterator;
@@ -74,11 +76,7 @@ public void testReopenAndVersionCheckDb() throws Exception {
7476

7577
@Test
7678
public void testObjectWriteReadDelete() throws Exception {
77-
CustomType1 t = new CustomType1();
78-
t.key = "key";
79-
t.id = "id";
80-
t.name = "name";
81-
t.child = "child";
79+
CustomType1 t = createCustomType1(1);
8280

8381
try {
8482
db.read(CustomType1.class, t.key);
@@ -106,17 +104,9 @@ public void testObjectWriteReadDelete() throws Exception {
106104

107105
@Test
108106
public void testMultipleObjectWriteReadDelete() throws Exception {
109-
CustomType1 t1 = new CustomType1();
110-
t1.key = "key1";
111-
t1.id = "id";
112-
t1.name = "name1";
113-
t1.child = "child1";
114-
115-
CustomType1 t2 = new CustomType1();
116-
t2.key = "key2";
117-
t2.id = "id";
118-
t2.name = "name2";
119-
t2.child = "child2";
107+
CustomType1 t1 = createCustomType1(1);
108+
CustomType1 t2 = createCustomType1(2);
109+
t2.id = t1.id;
120110

121111
db.write(t1);
122112
db.write(t2);
@@ -142,11 +132,7 @@ public void testMultipleObjectWriteReadDelete() throws Exception {
142132

143133
@Test
144134
public void testMultipleTypesWriteReadDelete() throws Exception {
145-
CustomType1 t1 = new CustomType1();
146-
t1.key = "1";
147-
t1.id = "id";
148-
t1.name = "name1";
149-
t1.child = "child1";
135+
CustomType1 t1 = createCustomType1(1);
150136

151137
IntKeyType t2 = new IntKeyType();
152138
t2.key = 2;
@@ -188,10 +174,7 @@ public void testMultipleTypesWriteReadDelete() throws Exception {
188174
public void testMetadata() throws Exception {
189175
assertNull(db.getMetadata(CustomType1.class));
190176

191-
CustomType1 t = new CustomType1();
192-
t.id = "id";
193-
t.name = "name";
194-
t.child = "child";
177+
CustomType1 t = createCustomType1(1);
195178

196179
db.setMetadata(t);
197180
assertEquals(t, db.getMetadata(CustomType1.class));
@@ -202,11 +185,7 @@ public void testMetadata() throws Exception {
202185

203186
@Test
204187
public void testUpdate() throws Exception {
205-
CustomType1 t = new CustomType1();
206-
t.key = "key";
207-
t.id = "id";
208-
t.name = "name";
209-
t.child = "child";
188+
CustomType1 t = createCustomType1(1);
210189

211190
db.write(t);
212191

@@ -222,13 +201,7 @@ public void testUpdate() throws Exception {
222201
@Test
223202
public void testSkip() throws Exception {
224203
for (int i = 0; i < 10; i++) {
225-
CustomType1 t = new CustomType1();
226-
t.key = "key" + i;
227-
t.id = "id" + i;
228-
t.name = "name" + i;
229-
t.child = "child" + i;
230-
231-
db.write(t);
204+
db.write(createCustomType1(i));
232205
}
233206

234207
KVStoreIterator<CustomType1> it = db.view(CustomType1.class).closeableIterator();
@@ -240,6 +213,36 @@ public void testSkip() throws Exception {
240213
assertFalse(it.hasNext());
241214
}
242215

216+
@Test
217+
public void testNegativeIndexValues() throws Exception {
218+
List<Integer> expected = Arrays.asList(-100, -50, 0, 50, 100);
219+
220+
expected.stream().forEach(i -> {
221+
try {
222+
db.write(createCustomType1(i));
223+
} catch (Exception e) {
224+
throw new RuntimeException(e);
225+
}
226+
});
227+
228+
List<Integer> results = StreamSupport
229+
.stream(db.view(CustomType1.class).index("int").spliterator(), false)
230+
.map(e -> e.num)
231+
.collect(Collectors.toList());
232+
233+
assertEquals(expected, results);
234+
}
235+
236+
private CustomType1 createCustomType1(int i) {
237+
CustomType1 t = new CustomType1();
238+
t.key = "key" + i;
239+
t.id = "id" + i;
240+
t.name = "name" + i;
241+
t.num = i;
242+
t.child = "child" + i;
243+
return t;
244+
}
245+
243246
private int countKeys(Class<?> type) throws Exception {
244247
byte[] prefix = db.getTypeInfo(type).keyPrefix();
245248
int count = 0;

core/src/test/scala/org/apache/spark/status/AppStatusListenerSuite.scala

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -894,15 +894,19 @@ class AppStatusListenerSuite extends SparkFunSuite with BeforeAndAfter {
894894
val dropped = stages.drop(1).head
895895

896896
// Cache some quantiles by calling AppStatusStore.taskSummary(). For quantiles to be
897-
// calculcated, we need at least one finished task.
897+
// calculated, we need at least one finished task. The code in AppStatusStore uses
898+
// `executorRunTime` to detect valid tasks, so that metric needs to be updated in the
899+
// task end event.
898900
time += 1
899901
val task = createTasks(1, Array("1")).head
900902
listener.onTaskStart(SparkListenerTaskStart(dropped.stageId, dropped.attemptId, task))
901903

902904
time += 1
903905
task.markFinished(TaskState.FINISHED, time)
906+
val metrics = TaskMetrics.empty
907+
metrics.setExecutorRunTime(42L)
904908
listener.onTaskEnd(SparkListenerTaskEnd(dropped.stageId, dropped.attemptId,
905-
"taskType", Success, task, null))
909+
"taskType", Success, task, metrics))
906910

907911
new AppStatusStore(store)
908912
.taskSummary(dropped.stageId, dropped.attemptId, Array(0.25d, 0.50d, 0.75d))

0 commit comments

Comments (0)