[Improvement][Spark] Complement the error messages of spark SDK #278

Merged (2 commits) on Nov 8, 2023
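
This PR replaces the bare "throw new IllegalArgumentException" statements across the Spark SDK with exceptions whose messages name what was not found: the adj list type, the property or property group, or the file format. A minimal sketch of the before/after pattern in Scala (the lookup helper and its map are illustrative, not part of the SDK API):

// Before: callers only see IllegalArgumentException with no hint of the cause.
//   throw new IllegalArgumentException
// After: the message carries the value that failed the lookup.
def getFileType(adjListType: String, known: Map[String, String]): String =
  known.getOrElse(
    adjListType,
    throw new IllegalArgumentException("adj list type not found: " + adjListType)
  )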
68 changes: 54 additions & 14 deletions spark/src/main/scala/com/alibaba/graphar/EdgeInfo.scala
@@ -75,7 +75,11 @@ class EdgeInfo() {
return str
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}

/**
@@ -95,7 +99,11 @@ class EdgeInfo() {
return adj_list.getFile_type_in_gar
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}

/**
@@ -117,7 +125,11 @@ class EdgeInfo() {
return adj_list.getProperty_groups
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}

/**
@@ -213,10 +225,18 @@ class EdgeInfo() {
return pg
}
}
throw new IllegalArgumentException(
"property group not found: " + property_name + " in adj list type: " + AdjListType
.AdjListTypeToString(
adj_list_type
)
)
}
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type or property group not found."
)
}

/**
@@ -245,7 +265,7 @@ class EdgeInfo() {
}
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException("property not found: " + property_name)
}

/**
@@ -275,7 +295,7 @@ class EdgeInfo() {
}
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException("property not found: " + property_name)
}

/** Get Primary key of edge info. */
@@ -337,7 +357,11 @@ class EdgeInfo() {
*/
def getVerticesNumFilePath(adj_list_type: AdjListType.Value): String = {
if (containAdjList(adj_list_type) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}
val str: String = prefix + getAdjListPrefix(adj_list_type) + "vertex_count"
return str
@@ -354,7 +378,11 @@ class EdgeInfo() {
*/
def getEdgesNumPathPrefix(adj_list_type: AdjListType.Value): String = {
if (containAdjList(adj_list_type) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}
val str: String = prefix + getAdjListPrefix(adj_list_type) + "edge_count"
return str
@@ -376,7 +404,11 @@ class EdgeInfo() {
adj_list_type: AdjListType.Value
): String = {
if (containAdjList(adj_list_type) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}
val str: String = prefix + getAdjListPrefix(adj_list_type) + "edge_count" +
chunk_index.toString()
@@ -400,7 +432,11 @@ class EdgeInfo() {
adj_list_type: AdjListType.Value
): String = {
if (containAdjList(adj_list_type) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}
val str: String =
prefix + getAdjListPrefix(adj_list_type) + "offset/chunk" +
@@ -419,7 +455,11 @@ class EdgeInfo() {
*/
def getOffsetPathPrefix(adj_list_type: AdjListType.Value): String = {
if (containAdjList(adj_list_type) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"adj list type not found: " + AdjListType.AdjListTypeToString(
adj_list_type
)
)
}
return prefix + getAdjListPrefix(adj_list_type) + "offset/"
}
@@ -503,7 +543,7 @@ class EdgeInfo() {
chunk_index: Long
): String = {
if (containPropertyGroup(property_group, adj_list_type) == false)
throw new IllegalArgumentException
throw new IllegalArgumentException("property group not found.")
var str: String = property_group.getPrefix
if (str == "") {
val properties = property_group.getProperties
@@ -540,7 +580,7 @@ class EdgeInfo() {
vertex_chunk_index: Long
): String = {
if (containPropertyGroup(property_group, adj_list_type) == false)
throw new IllegalArgumentException
throw new IllegalArgumentException("property group not found.")
var str: String = property_group.getPrefix
if (str == "") {
val properties = property_group.getProperties
@@ -573,7 +613,7 @@ class EdgeInfo() {
adj_list_type: AdjListType.Value
): String = {
if (containPropertyGroup(property_group, adj_list_type) == false)
throw new IllegalArgumentException
throw new IllegalArgumentException("property group not found.")
var str: String = property_group.getPrefix
if (str == "") {
val properties = property_group.getProperties
12 changes: 6 additions & 6 deletions spark/src/main/scala/com/alibaba/graphar/GraphInfo.scala
@@ -63,7 +63,7 @@ object GarType extends Enumeration {
case GarType.DOUBLE => "double"
case GarType.STRING => "string"
case GarType.ARRAY => "array"
case _ => throw new IllegalArgumentException
case _ => throw new IllegalArgumentException("Unknown data type")
}

/**
@@ -82,7 +82,7 @@ object GarType extends Enumeration {
case "double" => GarType.DOUBLE
case "string" => GarType.STRING
case "array" => GarType.ARRAY
case _ => throw new IllegalArgumentException
case _ => throw new IllegalArgumentException("Unknown data type: " + str)
}
}

@@ -105,7 +105,7 @@ object FileType extends Enumeration {
case FileType.CSV => "csv"
case FileType.PARQUET => "parquet"
case FileType.ORC => "orc"
case _ => throw new IllegalArgumentException
case _ => throw new IllegalArgumentException("Unknown file type")
}

/**
@@ -120,7 +120,7 @@ object FileType extends Enumeration {
case "csv" => FileType.CSV
case "parquet" => FileType.PARQUET
case "orc" => FileType.ORC
case _ => throw new IllegalArgumentException
case _ => throw new IllegalArgumentException("Unknown file type: " + str)
}

}
@@ -155,7 +155,7 @@ object AdjListType extends Enumeration {
case AdjListType.unordered_by_dest => "unordered_by_dest"
case AdjListType.ordered_by_source => "ordered_by_source"
case AdjListType.ordered_by_dest => "ordered_by_dest"
case _ => throw new IllegalArgumentException
case _ => throw new IllegalArgumentException("Unknown adjList type")
}

/** String to adjList type in gar */
@@ -164,7 +164,7 @@ object AdjListType extends Enumeration {
case "unordered_by_dest" => AdjListType.unordered_by_dest
case "ordered_by_source" => AdjListType.ordered_by_source
case "ordered_by_dest" => AdjListType.ordered_by_dest
case _ => throw new IllegalArgumentException
case _ => throw new IllegalArgumentException("Unknown adjList type: " + str)
}
}

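With the messages added above, an unsupported type, file format, or adj list string in an info file now reports the offending value. A small usage sketch (it assumes the string-to-enum helper is named StringToGarType, which the diff does not show; "decimal" is just an example of an unsupported type string):

import com.alibaba.graphar.GarType

try {
  GarType.StringToGarType("decimal") // not a supported GAR type string
} catch {
  case e: IllegalArgumentException =>
    println(e.getMessage) // prints: Unknown data type: decimal
}
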
10 changes: 5 additions & 5 deletions spark/src/main/scala/com/alibaba/graphar/VertexInfo.scala
@@ -93,7 +93,7 @@ class VertexInfo() {
}
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException("Property not found: " + property_name)
}

/**
@@ -117,7 +117,7 @@ class VertexInfo() {
}
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException("Property not found: " + property_name)
}

/**
@@ -141,7 +141,7 @@ class VertexInfo() {
}
}
}
throw new IllegalArgumentException
throw new IllegalArgumentException("Property not found: " + property_name)
}

/**
@@ -205,7 +205,7 @@ class VertexInfo() {
*/
def getFilePath(property_group: PropertyGroup, chunk_index: Long): String = {
if (containPropertyGroup(property_group) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException("Property group not found.")
}
var str: String = ""
if (property_group.getPrefix == "") {
@@ -234,7 +234,7 @@ class VertexInfo() {
*/
def getPathPrefix(property_group: PropertyGroup): String = {
if (containPropertyGroup(property_group) == false) {
throw new IllegalArgumentException
throw new IllegalArgumentException("Property group not found.")
}
var str: String = ""
if (property_group.getPrefix == "") {
@@ -72,7 +72,8 @@ case class GarScan(
case "csv" => createCSVReaderFactory()
case "orc" => createOrcReaderFactory()
case "parquet" => createParquetReaderFactory()
case _ => throw new IllegalArgumentException
case _ =>
throw new IllegalArgumentException("Invalid format name: " + formatName)
}

// Create the reader factory for the CSV format.
@@ -269,7 +270,8 @@ case class GarScan(
case "csv" => super.hashCode()
case "orc" => getClass.hashCode()
case "parquet" => getClass.hashCode()
case _ => throw new IllegalArgumentException
case _ =>
throw new IllegalArgumentException("Invalid format name: " + formatName)
}

/** Get the description string of the object. */
@@ -55,7 +55,8 @@ case class GarScanBuilder(
case "csv" => Array.empty[Filter]
case "orc" => pushedOrcFilters
case "parquet" => pushedParquetFilters
case _ => throw new IllegalArgumentException
case _ =>
throw new IllegalArgumentException("Invalid format name: " + formatName)
}

private lazy val pushedParquetFilters: Array[Filter] = {
@@ -87,7 +88,8 @@ case class GarScanBuilder(
case "orc" => sparkSession.sessionState.conf.nestedSchemaPruningEnabled
case "parquet" =>
sparkSession.sessionState.conf.nestedSchemaPruningEnabled
case _ => throw new IllegalArgumentException
case _ =>
throw new IllegalArgumentException("Invalid format name: " + formatName)
}

/** Build the file scan for GarDataSource. */
@@ -81,7 +81,8 @@ case class GarTable(
OrcUtils.inferSchema(sparkSession, files, options.asScala.toMap)
case "parquet" =>
ParquetUtils.inferSchema(sparkSession, options.asScala.toMap, files)
case _ => throw new IllegalArgumentException
case _ =>
throw new IllegalArgumentException("Invalid format name: " + formatName)
}

/** Construct a new write builder according to the actual file format. */
@@ -93,7 +94,8 @@ case class GarTable(
new OrcWriteBuilder(paths, formatName, supportsDataType, info)
case "parquet" =>
new ParquetWriteBuilder(paths, formatName, supportsDataType, info)
case _ => throw new IllegalArgumentException
case _ =>
throw new IllegalArgumentException("Invalid format name: " + formatName)
}

/**
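The datasource classes above (GarScan, GarScanBuilder, GarTable) all branch on formatName, and each fallback branch now reports the offending format instead of throwing a bare exception. A standalone sketch of that dispatch shape (illustrative only, not the actual class methods):

// The same match-with-descriptive-fallback shape used for the csv / orc / parquet branches.
def dispatchOnFormat[T](formatName: String)(csv: => T, orc: => T, parquet: => T): T =
  formatName match {
    case "csv"     => csv
    case "orc"     => orc
    case "parquet" => parquet
    case _ =>
      throw new IllegalArgumentException("Invalid format name: " + formatName)
  }
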
@@ -59,7 +59,9 @@ object GraphTransformer {
// load source vertex info
val label = dest_vertex_info.getLabel()
if (!sourceVertexInfosMap.contains(label)) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"vertex info of " + label + " not found in graph info."
)
}
val source_vertex_info = sourceVertexInfosMap(label)
// read vertex chunks from the source graph
@@ -105,7 +107,9 @@ object GraphTransformer {
// load source edge info
val key = dest_edge_info.getConcatKey()
if (!sourceEdgeInfosMap.contains(key)) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"edge info of " + key + " not found in graph info."
)
}
val source_edge_info = sourceEdgeInfosMap(key)
var has_loaded = false
@@ -146,7 +150,9 @@ object GraphTransformer {
dest_edge_info.getDst_label
}
if (!sourceVertexInfosMap.contains(vertex_label)) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"vertex info of " + vertex_label + " not found in graph info."
)
}
val vertex_info = sourceVertexInfosMap(vertex_label)
val reader = new VertexReader(source_prefix, vertex_info, spark)
10 changes: 7 additions & 3 deletions spark/src/main/scala/com/alibaba/graphar/graph/GraphWriter.scala
@@ -47,7 +47,9 @@ class GraphWriter() {
primaryKey: String = ""
): Unit = {
if (vertices.exists(_._1 == label)) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"Vertex data of label " + label + " has been put."
)
}
vertices += label -> df
vertexNums += label -> df.count
@@ -63,7 +65,9 @@ class GraphWriter() {
*/
def PutEdgeData(relation: (String, String, String), df: DataFrame): Unit = {
if (edges.exists(_._1 == relation)) {
throw new IllegalArgumentException
throw new IllegalArgumentException(
"Edge data of relation " + relation + " has been put."
)
}
edges += relation -> df
}
@@ -195,7 +199,7 @@ class GraphWriter() {
case (key, df) => {
edge_schemas += key -> new StructType(
df.schema.drop(2).toArray
) // drop the src, dst fileds
) // drop the src, dst fields
}
}
val graph_info = Utils.generateGraphInfo(
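
The GraphWriter checks also gain messages: registering vertex data for a label (or edge data for a relation) twice now says which label or relation was already put. A usage sketch (it assumes PutVertexData takes a label, a DataFrame, and an optional primary key, matching the signature partially shown in the diff; the SparkSession and person.csv input are placeholders):

import com.alibaba.graphar.graph.GraphWriter
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("graphar-demo").master("local[*]").getOrCreate()
val personDf = spark.read.option("header", "true").csv("person.csv") // placeholder input

val writer = new GraphWriter()
writer.PutVertexData("person", personDf)
// A second put with the same label now fails with:
//   java.lang.IllegalArgumentException: Vertex data of label person has been put.
writer.PutVertexData("person", personDf)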