Skip to content

Commit 6865612

Browse files
ArnauPrat authored and
Joan Guisado-Gámez
committed
Fixed bug at RuntimeTimePropertyGeneratorBuilder (#25)
* Fixed bug with visit method of EdgeTable and Match, which should not throw an exception Fixed bug at InstantiatePropertyGeneratorOperator to fetch the correct jar Added test for a complete execution of the generator * Removed commented code at DataSynthTest.scala * Updated travis.yml, expecting tests to pass...
1 parent 1aafff8 commit 6865612

File tree

5 files changed

+103
-51
lines changed

5 files changed

+103
-51
lines changed

.travis.yml

+2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,6 @@
1+
dist: trusty
12
language: scala
3+
sudo: required
24
scala:
35
- 2.11.2
46
jdk:

src/main/scala/org/dama/datasynth/runtime/spark/operators/InstantiatePropertyGeneratorOperator.scala

+4-2
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,10 @@ class InstantiatePropertyGeneratorOperator {
2424
* @return The instantiated property generator
2525
*/
2626
def apply[T](propertyTableName : String, info : ExecutionPlan.PropertyGenerator[T]) : PropertyGeneratorWrapper[T] = {
27-
val initParameters : Seq[Object] = info.initParameters.map( x => SparkRuntime.evalValueOperator(x)).map( ref => ref.asInstanceOf[Object])
28-
val urlCL = new URLClassLoader( Array[URL](new URL("file:///tmp/temp.jar")), getClass.getClassLoader());
27+
val initParameters : Seq[Object] = info.initParameters.map( x => SparkRuntime.evalValueOperator(x))
28+
.map( ref => ref.asInstanceOf[Object])
29+
val jarUrl = "file://"+SparkRuntime.getConfig().driverWorkspaceDir+"/temp.jar"
30+
val urlCL = new URLClassLoader( Array[URL]( new URL(jarUrl)),getClass.getClassLoader());
2931
val constructor = urlCL.loadClass(info.className).getConstructors()(0)
3032
val generator = constructor.newInstance(initParameters : _*).asInstanceOf[PropertyGenerator[T]]
3133
val rndGen = SparkRuntime.fetchRndGeneratorOperator(propertyTableName)

src/main/scala/org/dama/datasynth/runtime/spark/passes/RuntimePropertyGeneratorBuilder.scala

+26-49
Original file line numberDiff line numberDiff line change
@@ -19,16 +19,9 @@ import scala.reflect.runtime.universe.typeOf
1919
*/
2020
class RuntimePropertyGeneratorBuilder(config : DataSynthConfig) extends ExecutionPlanNonVoidVisitor[RuntimeClasses] {
2121

22-
23-
def codePropertyTableClasses(executionPlan:Seq[ExecutionPlan.Table]): RuntimeClasses={
22+
def codePropertyTableClasses(executionPlan : Seq[ExecutionPlan.Table]) : RuntimeClasses = {
2423
/** Writing .scala files **/
25-
/* val classes:Map[String,(String,String)] = executionPlan.foldLeft(
26-
Map[String,(String,String)]())( (currentDeclarations, nextNode) => currentDeclarations ++ nextNode.accept(this))
27-
classes
28-
*/
29-
val classes2:RuntimeClasses=executionPlan.foldLeft(new RuntimeClasses)((codeClassses, nextNode)=>codeClassses++nextNode.accept(this))
30-
classes2
31-
24+
executionPlan.foldLeft(new RuntimeClasses)((codeClassses, nextNode) => codeClassses++nextNode.accept(this))
3225
}
3326

3427
/**
@@ -43,35 +36,38 @@ class RuntimePropertyGeneratorBuilder(config : DataSynthConfig) extends Executio
4336
settings.nc.value = true
4437
settings.usejavacp.value = true
4538
settings.outputDirs.setSingleOutput(s"${config.driverWorkspaceDir}")
46-
val currentJarPath : String = getClass().getProtectionDomain().getCodeSource().getLocation().getPath()
47-
println(currentJarPath)
39+
val currentJarPath : String = getClass()
40+
.getProtectionDomain()
41+
.getCodeSource()
42+
.getLocation()
43+
.getPath()
4844
settings.classpath.append(currentJarPath)
4945
val g = new Global(settings)
5046
val run = new g.Run
5147

52-
53-
54-
55-
val sourceFileNames : List[String] = classes.toList.map({case (className,classCode)=>{
56-
val fileName:String = s"${config.driverWorkspaceDir}/$className.scala"
57-
val writer = new PrintWriter(new java.io.File(fileName))
58-
writer.write(classCode)
59-
writer.close()
60-
61-
fileName
62-
}})
63-
48+
// Creating .scala source code files
49+
val sourceFileNames : List[String] = classes.toList.map(
50+
{
51+
case (className,classCode) => {
52+
val fileName : String = s"${config.driverWorkspaceDir}/$className.scala"
53+
val writer = new PrintWriter(new java.io.File(fileName))
54+
writer.write(classCode)
55+
writer.close()
56+
fileName
57+
}
58+
}
59+
)
60+
61+
// Compiling the .scala source code files
6462
run.compile(sourceFileNames)
6563

66-
/** Building JAR **/
64+
// Building JAR
6765
val jar : JarWriter = new JarWriter(new scala.reflect.io.File(new java.io.File(jarFileName)), new JManifest())
68-
sourceFileNames.map( file => file.replace(".scala",".class")).foreach( file => jar.addFile(new scala.reflect.io.File( new java.io.File(file)),""))
66+
sourceFileNames.map( file => file.replace(".scala",".class"))
67+
.foreach( file => jar.addFile(new scala.reflect.io.File(new java.io.File(file)), ""))
6968
jar.close()
7069
}
7170

72-
73-
74-
7571
/**
7672
* Generates the name of a generated property generator given an original generator name
7773
* @param name The name of the original property generator
@@ -113,7 +109,6 @@ class RuntimePropertyGeneratorBuilder(config : DataSynthConfig) extends Executio
113109
}
114110
val propertyGeneratorName:String = propertyGenerator.className
115111
val generatedClassName:String = generateClassName(className)
116-
// val dependentGenerators = propertyGenerator.dependentPropertyTables.map(table => generateClassName(table.name))
117112
val dependentGeneratorsCallList : String = propertyGenerator.dependentPropertyTables.zipWithIndex.foldLeft("")(
118113
{case (current,(table,index)) => s"$current,${mkMatch(table,index)}"})
119114

@@ -126,54 +121,36 @@ class RuntimePropertyGeneratorBuilder(config : DataSynthConfig) extends Executio
126121
}
127122

128123
override def visit(node: ExecutionPlan.StaticValue[_]): RuntimeClasses = {
129-
// Map.empty
130124
throw new RuntimeException("No code should be generated for StaticValue[_]")
131125
}
132126

133127
override def visit(node: ExecutionPlan.PropertyGenerator[_]): RuntimeClasses = {
134-
// Map.empty
135128
throw new RuntimeException("No code should be generated for PropertyGenerator[_]")
136129
}
137130

138131
override def visit(node: ExecutionPlan.StructureGenerator): RuntimeClasses = {
139-
// Map.empty
140132
throw new RuntimeException("No code should be generated for StructureGenerator")
141133
}
142134

143-
/*override def visit(node: ExecutionPlan.PropertyTable[_]): Map[String, (String,String)] = {
144-
val classTypeName = generateClassName(node.name)
145-
val classDeclaration = generatePGClassDefinition(classTypeName,node.generator)
146-
val dependants = node.generator.dependentPropertyTables.map( table => table.accept[Map[String,(String,String)]](this) ).
147-
foldLeft(Map[String,(String,String)]())( {case (accumulated, next) => accumulated ++ next} )
148-
dependants + (node.name -> (classTypeName,classDeclaration))
149-
150-
}*/
151-
152135
override def visit(node: ExecutionPlan.PropertyTable[_]): RuntimeClasses = {
153136
val classTypeName = generateClassName(node.name)
154137
val classDeclaration = generatePGClassDefinition(classTypeName,node.generator)
155138

156-
157139
val dependants:RuntimeClasses = node.generator.dependentPropertyTables.map(table => table.accept[RuntimeClasses](this) ).
158140
foldLeft(new RuntimeClasses())( {case (accumulated, next) => accumulated ++ next} )
159141
val codeClass : RuntimeClass = new RuntimeClass(node.name,classTypeName,classDeclaration)
160142
dependants + codeClass
161-
dependants
162143
}
163144

164-
165145
override def visit(node: ExecutionPlan.EdgeTable): RuntimeClasses = {
166-
// Map.empty
167-
throw new RuntimeException("No code should be generated for EdgeTable")
146+
new RuntimeClasses()
168147
}
169148

170149
override def visit(node: ExecutionPlan.TableSize): RuntimeClasses = {
171-
// Map.empty
172150
throw new RuntimeException("No code should be generated for TableSize")
173151
}
174152

175153
override def visit(node: ExecutionPlan.Match): RuntimeClasses = {
176-
// Map.empty
177-
throw new RuntimeException("No code should be generated for Match")
154+
new RuntimeClasses()
178155
}
179156
}

src/test/resources/test.json

+37
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
{
2+
"nodeTypes" : [
3+
{
4+
"name" : "entity1",
5+
"instances" : 1000000,
6+
"properties" : [
7+
{
8+
"name": "attribute1",
9+
"dataType": "Long",
10+
"generator": {
11+
"name":"org.dama.datasynth.common.generators.property.empirical.IntGenerator",
12+
"dependencies":[],
13+
"initParameters" : ["src/main/resources/distributions/intDistribution.txt:String"," :String"]}
14+
},
15+
{
16+
"name": "attribute2",
17+
"dataType": "Long",
18+
"generator": {
19+
"name":"org.dama.datasynth.common.generators.property.empirical.IntGenerator",
20+
"dependencies":[],
21+
"initParameters" : ["src/main/resources/distributions/intDistribution.txt:String"," :String"]}
22+
}
23+
]
24+
}
25+
],
26+
"edgeTypes" : [
27+
{
28+
"name" : "edge1",
29+
"source" : "entity1",
30+
"target" : "entity1",
31+
"structure" : {
32+
"name" : "org.dama.datasynth.common.generators.structure.BTERGenerator",
33+
"initParameters" : ["src/main/resources/degrees/dblp:String","src/main/resources/ccs/dblp:String"]
34+
}
35+
}
36+
]
37+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
package org.dama.datasynth
2+
3+
import java.io.File
4+
import java.nio.file.{Files, Path, Paths}
5+
6+
import org.apache.commons.io.FileUtils
7+
import org.apache.spark.sql.SparkSession
8+
import org.junit.runner.RunWith
9+
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
10+
import org.scalatest.junit.JUnitRunner
11+
12+
/**
13+
* Created by aprat on 17/07/17.
14+
*/
15+
@RunWith(classOf[JUnitRunner])
16+
class DataSynthTest extends FlatSpec with Matchers with BeforeAndAfterAll {
17+
18+
19+
" The test schema at /src/test/resources/test.json should work " should " work " in {
20+
21+
SparkSession.builder().master("local[*]").getOrCreate()
22+
23+
val testFolder = new File("./test")
24+
val dataFolder = new File("./test/data")
25+
val workspaceFolder = new File("./test/workspace")
26+
testFolder.mkdir()
27+
dataFolder.mkdir()
28+
workspaceFolder.mkdir()
29+
DataSynth.main(List("--output-dir", dataFolder.getAbsolutePath,
30+
"--driver-workspace-dir", workspaceFolder.getAbsolutePath,
31+
"--schema-file", "src/test/resources/test.json").toArray)
32+
FileUtils.deleteDirectory(testFolder)
33+
}
34+
}

0 commit comments

Comments
 (0)