
Commit fef6605

some utils for working w/ new "enum" format
1 parent dbfc7bf commit fef6605

3 files changed: +169 −2 lines changed

core/src/main/scala/org/apache/spark/status/api/v1/CustomObjectMapper.scala

Lines changed: 14 additions & 2 deletions
@@ -23,7 +23,11 @@ import javax.ws.rs.core.MediaType
 import javax.ws.rs.ext.{ContextResolver, Provider}
 
 import com.fasterxml.jackson.annotation.JsonInclude
-import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
+import com.fasterxml.jackson.core.JsonGenerator
+import com.fasterxml.jackson.databind.{JsonSerializer, ObjectMapper, SerializationFeature, SerializerProvider}
+import com.fasterxml.jackson.databind.module.SimpleModule
+
+import org.apache.spark.util.SparkEnum
 
 @Provider
 @Produces(Array(MediaType.APPLICATION_JSON))
@@ -38,10 +42,13 @@ class CustomObjectMapper extends ContextResolver[ObjectMapper]{
   mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
   mapper.setDateFormat(CustomObjectMapper.makeISODateFormat)
 
+  val sparkEnumModule = new SimpleModule()
+  sparkEnumModule.addSerializer(classOf[SparkEnum], new SparkEnumSerializer)
+  mapper.registerModule(sparkEnumModule)
+
   override def getContext(tpe: Class[_]): ObjectMapper = {
     mapper
   }
-
 }
 
 object CustomObjectMapper {
@@ -51,5 +58,10 @@ object CustomObjectMapper {
     iso8601.setCalendar(cal);
     iso8601;
   }
+}
 
+class SparkEnumSerializer extends JsonSerializer[SparkEnum] {
+  def serialize(se: SparkEnum, jgen: JsonGenerator, provider: SerializerProvider): Unit = {
+    jgen.writeString(se.toString)
+  }
 }
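
Note: with the module registered, any SparkEnum value in a REST API response serializes as a bare JSON string rather than an empty object. A minimal sketch of that wiring in isolation; the TaskMode enum and the SerializerSketch wrapper here are hypothetical, invented for illustration, and only the addSerializer/registerModule calls mirror what CustomObjectMapper does above.

package org.apache.spark.sketch // any package under org.apache.spark sees private[spark]

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.module.SimpleModule

import org.apache.spark.status.api.v1.SparkEnumSerializer
import org.apache.spark.util.{SparkEnum, SparkEnumCompanion}

// Hypothetical enum, for illustration only (the commit's own fixture is
// DummyEnum, in the test file further down).
sealed abstract class TaskMode extends SparkEnum
object TaskMode extends SparkEnumCompanion[TaskMode] {
  final val Local = { case object Local extends TaskMode; Local }
  final val Cluster = { case object Cluster extends TaskMode; Cluster }
  val values = Seq(Local, Cluster)
}

object SerializerSketch {
  def main(args: Array[String]): Unit = {
    // Same registration that CustomObjectMapper performs above. Jackson's
    // SimpleModule matches the serializer against the SparkEnum interface,
    // so every concrete enum value picks it up.
    val mapper = new ObjectMapper()
    val module = new SimpleModule()
    module.addSerializer(classOf[SparkEnum], new SparkEnumSerializer)
    mapper.registerModule(module)

    println(mapper.writeValueAsString(TaskMode.Local)) // prints "Local"
  }
}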
core/src/main/scala/org/apache/spark/util/SparkEnum.scala

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+import org.apache.spark.SparkException
+
+/**
+ * Util for the "enum" pattern we have adopted
+ */
+private[spark] trait SparkEnum {
+  override def toString: String = {
+    val simpleName = getClass().getSimpleName()
+    val a = simpleName.indexOf('$')
+    simpleName.substring(0, a)
+  }
+
+}
+
+/**
+ * Util for the "enum" pattern we've adopted. It adds methods to parse the enum from a String.
+ * Note that you must still manually keep {{values}} in sync with the values you create.
+ */
+private[spark] trait SparkEnumCompanion[T <: SparkEnum] {
+  val values: Seq[T]
+
+  lazy val enumNames: Map[String, T] = {
+    try {
+      val tmpMap = values.map { t =>
+        t.toString -> t
+      }.toMap
+      if (tmpMap.size != values.size) {
+        throw new SparkException("It appears you have multiple constants with the same" +
+          " name. Perhaps your naming scheme is incompatible with SparkEnum. found names: " +
+          tmpMap.keys)
+      }
+      tmpMap
+    } catch {
+      case ie: InternalError =>
+        throw new SparkException("It appears you are using SparkEnum in a class which does not " +
+          "follow the naming conventions")
+    }
+  }
+
+
+  def parse(s: String): Option[T] = {
+    enumNames.get(s)
+  }
+
+  def parseIgnoreCase(s: String): Option[T] = {
+    enumNames.find { case (k, v) =>
+      k.toLowerCase() == s.toLowerCase()
+    }.map { _._2 }
+  }
+
+}
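
Note: the toString above leans on how scalac names nested objects: a case object compiles to a class whose getSimpleName begins with the object's source name followed by a '$', so taking the substring before the first '$' recovers the name. A minimal sketch of how a caller defines an enum on this pattern; the Color enum is hypothetical, invented for illustration (the DummyEnum fixture in the test file below does the same thing).

// Hypothetical enum built on the pattern; not part of this commit.
// Must live under the org.apache.spark package to see the private[spark] traits.
sealed abstract class Color extends SparkEnum

object Color extends SparkEnumCompanion[Color] {
  final val Red = { case object Red extends Color; Red }
  final val Blue = { case object Blue extends Color; Blue }
  // As the scaladoc warns, values must be kept in sync by hand.
  val values = Seq(Red, Blue)
}

// Color.Red.toString           == "Red"
// Color.parse("Red")           == Some(Color.Red)
// Color.parse("red")           == None
// Color.parseIgnoreCase("red") == Some(Color.Red)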
core/src/test/scala/org/apache/spark/util/SparkEnumTest.scala

Lines changed: 86 additions & 0 deletions
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+import org.scalatest.{FunSuite, Matchers}
+
+import org.apache.spark.SparkException
+
+class SparkEnumTest extends FunSuite with Matchers {
+
+  test("toString") {
+    DummyEnum.Foo.toString should be ("Foo")
+    DummyEnum.Bar.toString should be ("Bar")
+  }
+
+  test("parse") {
+    DummyEnum.parse("Foo") should be (Some(DummyEnum.Foo))
+    DummyEnum.parse("Bar") should be (Some(DummyEnum.Bar))
+
+    DummyEnum.parse("") should be (None)
+    DummyEnum.parse("foo") should be (None)
+    DummyEnum.parse("bar") should be (None)
+  }
+
+
+  test("bad enums") {
+    val ex = intercept[SparkException](BadEnum.enumNames)
+    // I get different errors on each run, not sure why, but either is fine.
+    ex.getMessage should (be ("It appears you have multiple constants with the same name. " +
+      "Perhaps your naming scheme is incompatible with SparkEnum. found names: Set(Bippy)") or
+      be ("It appears you are using SparkEnum in a class which does not follow the naming" +
+        " conventions"))
+  }
+
+  test("parseIgnoreCase") {
+    DummyEnum.parseIgnoreCase("Foo") should be (Some(DummyEnum.Foo))
+    DummyEnum.parseIgnoreCase("Bar") should be (Some(DummyEnum.Bar))
+
+    DummyEnum.parseIgnoreCase("") should be (None)
+    DummyEnum.parseIgnoreCase("foo") should be (Some(DummyEnum.Foo))
+    DummyEnum.parseIgnoreCase("bar") should be (Some(DummyEnum.Bar))
+  }
+}
+
+
+sealed abstract class DummyEnum extends SparkEnum
+
+object DummyEnum extends SparkEnumCompanion[DummyEnum] {
+  final val Foo = {
+    case object Foo extends DummyEnum
+    Foo
+  }
+  final val Bar = {
+    case object Bar extends DummyEnum
+    Bar
+  }
+  val values = Seq(
+    Foo,
+    Bar
+  )
+}
+
+sealed abstract class BadEnum extends SparkEnum
+
+object BadEnum extends SparkEnumCompanion[BadEnum] {
+  case object Bippy extends BadEnum
+  object Blah {
+    case object Bippy extends BadEnum
+  }
+
+  val values = Seq(Bippy, Blah.Bippy)
+}
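
Note: the BadEnum fixture can fail in two different ways, which is why the "bad enums" test accepts either message. Bippy and Blah.Bippy both stringify to "Bippy", so the derived name map comes out smaller than values; and on some JDK/Scala combinations getSimpleName on a doubly nested object instead throws java.lang.InternalError ("Malformed class name", the long-standing scala/bug#2034), which enumNames rewraps as the naming-convention SparkException. A REPL-style sketch of the collision case, assuming a JVM where getSimpleName succeeds:

// Both constants derive the same name, so they collide as map keys:
BadEnum.Bippy.toString        // "Bippy"
BadEnum.Blah.Bippy.toString   // "Bippy" as well
// values.size == 2 but the derived map has a single entry, so forcing the
// lazy val throws the duplicate-name SparkException:
BadEnum.enumNames             // throws SparkException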
