diff --git a/.gitignore b/.gitignore
index 29bf91f..ac3dd9d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,4 @@
/*.iml
*.jj
target
+/.vscode/
diff --git a/pom.xml b/pom.xml
index a5394a9..5e94e47 100644
--- a/pom.xml
+++ b/pom.xml
@@ -36,6 +36,7 @@
4.13.2
2.42.0
3.1.1
+ 3.5.0
@@ -69,6 +70,12 @@
truth
${truth.version}
test
+
+
+ com.google.guava
+ guava
+
+
@@ -100,6 +107,8 @@
${exec-maven.version}
+
+
jjt-2-concat
exec
@@ -189,6 +198,26 @@
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ ${build-helper-maven-plugin.version}
+
+
+ add-source
+
+ add-source
+
+ generate-sources
+
+
+
+
+
+
+
+
+
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/diff/ASTTreeUtils.java b/src/main/java/com/google/cloud/solutions/spannerddl/diff/ASTTreeUtils.java
index 457e2c1..fca12a1 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/diff/ASTTreeUtils.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/diff/ASTTreeUtils.java
@@ -75,11 +75,20 @@ private ASTTreeUtils() {}
* spacing and capitalization of tokens.
*/
public static String tokensToString(Token firstToken, Token lastToken) {
+ return tokensToString(firstToken, lastToken, true);
+ }
+
+ /**
+ * Generate the original parsed text between the 2 specified tokens, normalizing the text with
+ * spacing and optional capitalization of reserved words.
+ */
+ public static String tokensToString(
+ Token firstToken, Token lastToken, boolean upperCaseReserved) {
StringBuilder sb = new StringBuilder();
Token t = firstToken;
while (t != lastToken) {
String tok = t.toString();
- sb.append(isReservedWord(tok) ? tok.toUpperCase() : tok);
+ sb.append(isReservedWord(tok) && upperCaseReserved ? tok.toUpperCase() : tok);
if (t.next != null
&& !t.next.toString().equals(",")
@@ -91,15 +100,23 @@ public static String tokensToString(Token firstToken, Token lastToken) {
}
// append last token
String tok = t.toString();
- sb.append(isReservedWord(tok) ? tok.toUpperCase() : tok);
+ sb.append(isReservedWord(tok) && upperCaseReserved ? tok.toUpperCase() : tok);
return sb.toString();
}
/**
* Generate the original parsed text of the node, normalizing the text with spacing and
- * capitalization of tokens.
+ * capitalization of reserved words.
*/
public static String tokensToString(SimpleNode node) {
- return tokensToString(node.jjtGetFirstToken(), node.jjtGetLastToken());
+ return tokensToString(node, true);
+ }
+
+ /**
+ * Generate the original parsed text of the node, normalizing the text with spacing and optional
+ * capitalization of reserved words.
+ */
+ public static String tokensToString(SimpleNode node, boolean upperCaseReserved) {
+ return tokensToString(node.jjtGetFirstToken(), node.jjtGetLastToken(), upperCaseReserved);
}
}
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTforeign_key.java b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTforeign_key.java
index 549b69b..1dd8a87 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTforeign_key.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTforeign_key.java
@@ -48,7 +48,7 @@ public List getConstrainedColumnNames() {
private List identifierListToStringList(ASTidentifier_list idList) {
return Arrays.stream(idList.children)
- .map(o -> ASTTreeUtils.tokensToString((ASTidentifier) o))
+ .map(o -> ASTTreeUtils.tokensToString((ASTidentifier) o, false))
.collect(Collectors.toList());
}
@@ -57,7 +57,7 @@ public String getReferencedTableName() {
if (children[0] instanceof ASTconstraint_name) {
child++;
}
- return ASTTreeUtils.tokensToString((ASTreferenced_table) children[child]);
+ return ASTTreeUtils.tokensToString((ASTreferenced_table) children[child], false);
}
public List getReferencedColumnNames() {
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTgeneration_clause.java b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTgeneration_clause.java
index 4c76d3d..e5feffc 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTgeneration_clause.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTgeneration_clause.java
@@ -27,6 +27,6 @@ public ASTgeneration_clause(DdlParser p, int id) {
public String toString() {
final ASTexpression exp = (ASTexpression) children[0];
- return " AS ( " + exp.toString() + " ) STORED";
+ return "AS ( " + exp.toString() + " ) STORED";
}
}
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTkey_part.java b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTkey_part.java
index f4bafef..c811caa 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTkey_part.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTkey_part.java
@@ -16,8 +16,6 @@
package com.google.cloud.solutions.spannerddl.parser;
-import com.google.cloud.solutions.spannerddl.diff.ASTTreeUtils;
-
/** Abstract Syntax Tree parser object for "key_part" token */
public class ASTkey_part extends SimpleNode {
@@ -35,13 +33,11 @@ public String toString() {
return jjtGetFirstToken().toString();
}
if (children.length == 1) {
- return ASTTreeUtils.tokensToString((ASTpath) children[0])
- + " ASC"; // key name without direction ;
+
+ return ((ASTpath) children[0]).toString() + " ASC"; // key name without direction ;
} else {
// key name and ASC/DESC
- return ASTTreeUtils.tokensToString((ASTpath) children[0])
- + " "
- + children[1].toString().toUpperCase();
+ return ((ASTpath) children[0]).toString() + " " + children[1].toString().toUpperCase();
}
}
}
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTname.java b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTname.java
index 7252899..0274fe5 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTname.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTname.java
@@ -31,6 +31,6 @@ public ASTname(DdlParser p, int id) {
@Override
public String toString() {
- return ASTTreeUtils.tokensToString(this);
+ return ASTTreeUtils.tokensToString(this, false);
}
}
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTpath.java b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTpath.java
index 89cc756..cb6866f 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTpath.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTpath.java
@@ -31,6 +31,6 @@ public ASTpath(DdlParser p, int id) {
@Override
public String toString() {
- return ASTTreeUtils.tokensToString(this);
+ return ASTTreeUtils.tokensToString(this, false);
}
}
diff --git a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTstored_column.java b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTstored_column.java
index f7f9f03..7974e7a 100644
--- a/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTstored_column.java
+++ b/src/main/java/com/google/cloud/solutions/spannerddl/parser/ASTstored_column.java
@@ -29,6 +29,6 @@ public ASTstored_column(DdlParser p, int id) {
@Override
public String toString() {
- return children[0].toString();
+ return ((ASTpath) children[0]).toString();
}
}
diff --git a/src/main/jjtree-sources/DdlParser.head b/src/main/jjtree-sources/DdlParser.head
index d6efd75..516f4f7 100644
--- a/src/main/jjtree-sources/DdlParser.head
+++ b/src/main/jjtree-sources/DdlParser.head
@@ -56,7 +56,7 @@ options {
}
PARSER_BEGIN(DdlParser)
package com.google.cloud.solutions.spannerddl.parser;
-import java.io.InputStream;import java.io.StringReader;import java.util.*;
+import java.io.StringReader;
public class DdlParser {
public static ASTddl_statement parseDdlStatement(String in)
diff --git a/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffFromFilesTest.java b/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffFromFilesTest.java
new file mode 100644
index 0000000..a129a91
--- /dev/null
+++ b/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffFromFilesTest.java
@@ -0,0 +1,116 @@
+package com.google.cloud.solutions.spannerddl.diff;
+
+import static com.google.cloud.solutions.spannerddl.diff.DdlDiff.ALLOW_DROP_STATEMENTS_OPT;
+import static com.google.cloud.solutions.spannerddl.diff.DdlDiff.ALLOW_RECREATE_CONSTRAINTS_OPT;
+import static com.google.cloud.solutions.spannerddl.diff.DdlDiff.ALLOW_RECREATE_INDEXES_OPT;
+import static com.google.common.truth.Truth.assertWithMessage;
+import static org.junit.Assert.fail;
+
+import com.google.cloud.solutions.spannerddl.testUtils.ReadTestDatafile;
+import com.google.common.collect.ImmutableMap;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import org.junit.Test;
+
+public class DdlDiffFromFilesTest {
+
+ @Test
+ public void compareDddTextFiles() throws IOException {
+ // Uses 3 files: 2 containing DDL segments to run diffs on, 1 with the expected results
+ // if allowRecreateIndexes and allowDropStatements are set.
+
+ LinkedHashMap originalSegments =
+ ReadTestDatafile.readDdlSegmentsFromFile("originalDdl.txt");
+ LinkedHashMap newSegments =
+ ReadTestDatafile.readDdlSegmentsFromFile("newDdl.txt");
+ LinkedHashMap expectedOutputs =
+ ReadTestDatafile.readDdlSegmentsFromFile("expectedDdlDiff.txt");
+
+ Iterator> originalSegmentIt = originalSegments.entrySet().iterator();
+ Iterator> newSegmentIt = newSegments.entrySet().iterator();
+ Iterator> expectedOutputIt = expectedOutputs.entrySet().iterator();
+
+ String segmentName = null;
+ try {
+ while (originalSegmentIt.hasNext()) {
+ Map.Entry originalSegment = originalSegmentIt.next();
+ segmentName = originalSegment.getKey();
+ Map.Entry newSegment = newSegmentIt.next();
+ Map.Entry expectedOutput = expectedOutputIt.next();
+
+ // verify segment name order for sanity.
+ assertWithMessage("mismatched section names in newDdl.txt")
+ .that(newSegment.getKey())
+ .isEqualTo(segmentName);
+ assertWithMessage("mismatched section names in expectedDdlDiff.txt")
+ .that(expectedOutput.getKey())
+ .isEqualTo(segmentName);
+ List expectedDiff =
+ expectedOutput.getValue() != null
+ ? Arrays.asList(expectedOutput.getValue().split("\n"))
+ : Collections.emptyList();
+
+ DdlDiff ddlDiff = DdlDiff.build(originalSegment.getValue(), newSegment.getValue());
+ // Run diff with allowRecreateIndexes and allowDropStatements
+ List diff =
+ ddlDiff.generateDifferenceStatements(
+ ImmutableMap.of(
+ ALLOW_RECREATE_INDEXES_OPT,
+ true,
+ ALLOW_DROP_STATEMENTS_OPT,
+ true,
+ ALLOW_RECREATE_CONSTRAINTS_OPT,
+ true));
+ // check expected results.
+ assertWithMessage("Mismatch for section " + segmentName).that(diff).isEqualTo(expectedDiff);
+
+ // TEST PART 2: with allowDropStatements=false
+
+ // build an expectedResults without any column or table drops.
+ List expectedDiffNoDrops =
+ expectedDiff.stream()
+ .filter(statement -> !statement.matches(".*DROP (TABLE|COLUMN).*"))
+ .collect(Collectors.toCollection(LinkedList::new));
+
+ // remove any drop indexes from the expectedResults if they do not have an equivalent
+ // CREATE statement. This is because we are allowing recreation of indexes, but not allowing
+ // dropping of removed indexes.
+ for (String statement : expectedDiff) {
+ if (statement.startsWith("DROP INDEX ")) {
+ String indexName = statement.split(" ")[2];
+ // see if there is a matching create statement
+ Pattern p = Pattern.compile("CREATE .*INDEX " + indexName + " ");
+ if (expectedDiffNoDrops.stream().noneMatch(s -> p.matcher(s).find())) {
+ expectedDiffNoDrops.remove(statement);
+ }
+ }
+ }
+
+ diff =
+ ddlDiff.generateDifferenceStatements(
+ ImmutableMap.of(
+ ALLOW_RECREATE_INDEXES_OPT,
+ true,
+ ALLOW_DROP_STATEMENTS_OPT,
+ false,
+ ALLOW_RECREATE_CONSTRAINTS_OPT,
+ true));
+ // check expected results.
+ assertWithMessage("Mismatch for section (noDrops)" + segmentName)
+ .that(diff)
+ .isEqualTo(expectedDiffNoDrops);
+ }
+ } catch (Throwable e) {
+ e.printStackTrace(System.err);
+ fail("Unexpected exception when processing segment " + segmentName + ": " + e);
+ }
+ }
+}
diff --git a/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffTest.java b/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffTest.java
index eb18195..8b242e8 100644
--- a/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffTest.java
+++ b/src/test/java/com/google/cloud/solutions/spannerddl/diff/DdlDiffTest.java
@@ -20,25 +20,14 @@
import static com.google.cloud.solutions.spannerddl.diff.DdlDiff.ALLOW_RECREATE_CONSTRAINTS_OPT;
import static com.google.cloud.solutions.spannerddl.diff.DdlDiff.ALLOW_RECREATE_INDEXES_OPT;
import static com.google.common.truth.Truth.assertThat;
-import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.fail;
import com.google.cloud.solutions.spannerddl.parser.ASTddl_statement;
import com.google.cloud.solutions.spannerddl.parser.ParseException;
import com.google.common.collect.ImmutableMap;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
import org.junit.Test;
public class DdlDiffTest {
@@ -349,7 +338,7 @@ public void generateAlterTable_changeGenerationClause() {
"create table test1 (col1 int64, col2 int64, col3 int64 as ( col1/col2 ) stored) primary"
+ " key (col1)",
true,
- "Cannot change generation clause of table test1 column col3 from AS ");
+ "Cannot change generation clause of table test1 column col3 from AS ");
// add generation
getDiffCheckDdlDiffException(
@@ -365,7 +354,7 @@ public void generateAlterTable_changeGenerationClause() {
+ " key (col1)",
"create table test1 (col1 int64, col2 int64, col3 int64) primary key (col1)",
true,
- "Cannot change generation clause of table test1 column col3 from AS");
+ "Cannot change generation clause of table test1 column col3 from AS");
}
@Test
@@ -542,126 +531,4 @@ public void differentIndexesWithRecreate() throws DdlDiffException {
.generateDifferenceStatements(options))
.isEqualTo(Arrays.asList("DROP INDEX index1", "CREATE INDEX index1 ON table1 (col1 DESC)"));
}
-
- @Test
- public void compareDddTextFiles() throws IOException {
- // Uses 3 files: 2 containing DDL segments to run diffs on, 1 with the expected results
- // if allowRecreateIndexes and allowDropStatements are set.
-
- LinkedHashMap originalSegments = readDdlSegmentsFromFile("originalDdl.txt");
- LinkedHashMap newSegments = readDdlSegmentsFromFile("newDdl.txt");
- LinkedHashMap expectedOutputs = readDdlSegmentsFromFile("expectedDdlDiff.txt");
-
- Iterator> originalSegmentIt = originalSegments.entrySet().iterator();
- Iterator> newSegmentIt = newSegments.entrySet().iterator();
- Iterator> expectedOutputIt = expectedOutputs.entrySet().iterator();
-
- String segmentName = null;
- try {
- while (originalSegmentIt.hasNext()) {
- Map.Entry originalSegment = originalSegmentIt.next();
- segmentName = originalSegment.getKey();
- Map.Entry newSegment = newSegmentIt.next();
- Map.Entry expectedOutput = expectedOutputIt.next();
-
- // verify segment name order for sanity.
- assertWithMessage("mismatched section names in newDdl.txt")
- .that(newSegment.getKey())
- .isEqualTo(segmentName);
- assertWithMessage("mismatched section names in expectedDdlDiff.txt")
- .that(expectedOutput.getKey())
- .isEqualTo(segmentName);
- List expectedDiff =
- expectedOutput.getValue() != null
- ? Arrays.asList(expectedOutput.getValue().split("\n"))
- : Collections.emptyList();
-
- DdlDiff ddlDiff = DdlDiff.build(originalSegment.getValue(), newSegment.getValue());
- // Run diff with allowRecreateIndexes and allowDropStatements
- List diff =
- ddlDiff.generateDifferenceStatements(
- ImmutableMap.of(
- ALLOW_RECREATE_INDEXES_OPT,
- true,
- ALLOW_DROP_STATEMENTS_OPT,
- true,
- ALLOW_RECREATE_CONSTRAINTS_OPT,
- true));
- // check expected results.
- assertWithMessage("Mismatch for section " + segmentName).that(diff).isEqualTo(expectedDiff);
-
- // TEST PART 2: with allowDropStatements=false
-
- // build an expectedResults without any column or table drops.
- List expectedDiffNoDrops =
- expectedDiff.stream()
- .filter(statement -> !statement.matches(".*DROP (TABLE|COLUMN).*"))
- .collect(Collectors.toCollection(LinkedList::new));
-
- // remove any drop indexes from the expectedResults if they do not have an equivalent
- // CREATE statement. This is because we are allowing recreation of indexes, but not allowing
- // dropping of removed indexes.
- for (String statement : expectedDiff) {
- if (statement.startsWith("DROP INDEX ")) {
- String indexName = statement.split(" ")[2];
- // see if there is a matching create statement
- Pattern p = Pattern.compile("CREATE .*INDEX " + indexName + " ");
- if (expectedDiffNoDrops.stream().noneMatch(s -> p.matcher(s).find())) {
- expectedDiffNoDrops.remove(statement);
- }
- }
- }
-
- diff =
- ddlDiff.generateDifferenceStatements(
- ImmutableMap.of(
- ALLOW_RECREATE_INDEXES_OPT,
- true,
- ALLOW_DROP_STATEMENTS_OPT,
- false,
- ALLOW_RECREATE_CONSTRAINTS_OPT,
- true));
- // check expected results.
- assertWithMessage("Mismatch for section (noDrops)" + segmentName)
- .that(diff)
- .isEqualTo(expectedDiffNoDrops);
- }
- } catch (Throwable e) {
- e.printStackTrace(System.err);
- fail("Unexpected exception when processing segment " + segmentName + ": " + e);
- }
- }
-
- private LinkedHashMap readDdlSegmentsFromFile(String filename)
- throws IOException {
- File file = new File("src/test/resources/" + filename).getAbsoluteFile();
- LinkedHashMap output = new LinkedHashMap<>();
-
- try (BufferedReader in = new BufferedReader(new FileReader(file))) {
-
- String sectionName = null;
- StringBuilder section = new StringBuilder();
- String line;
- while (null != (line = in.readLine())) {
- line = line.replaceAll("#.*", "").trim();
- if (line.isEmpty()) {
- continue;
- }
- if (line.startsWith("==")) {
- // new section
- if (sectionName != null) {
- // add closed section.
- output.put(sectionName, section.length() > 0 ? section.toString() : null);
- }
- sectionName = line;
- section = new StringBuilder();
- continue;
- } else if (sectionName == null) {
- throw new IOException("no section name before first statement");
- }
- section.append(line).append('\n');
- }
- return output;
- }
- }
}
diff --git a/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserFromFileTest.java b/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserFromFileTest.java
new file mode 100644
index 0000000..018f5de
--- /dev/null
+++ b/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserFromFileTest.java
@@ -0,0 +1,92 @@
+package com.google.cloud.solutions.spannerddl.parser;
+
+import static com.google.common.truth.Truth.assertWithMessage;
+import static org.junit.Assert.fail;
+
+import com.google.cloud.solutions.spannerddl.testUtils.ReadTestDatafile;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import org.junit.Test;
+
+public class DDLParserFromFileTest {
+
+ @Test
+ public void validateDDLfromFile() throws IOException {
+
+ LinkedHashMap tests =
+ ReadTestDatafile.readDdlSegmentsFromFile("ddlParserValidation.txt");
+
+ Iterator> testIt = tests.entrySet().iterator();
+
+ String segmentName = "unread";
+ while (testIt.hasNext()) {
+ Entry test = testIt.next();
+ segmentName = test.getKey();
+ // remove newlines, indentation and shrink all whitespace to a single space.
+ String ddlStatement = test.getValue().replaceAll("\\s+", " ").trim();
+
+ try (StringReader in = new StringReader(ddlStatement)) {
+ DdlParser parser = new DdlParser(in);
+ parser.ddl_statement();
+ ASTddl_statement parsedStatement = (ASTddl_statement) parser.jjtree.rootNode();
+
+ assertWithMessage("Mismatch for section " + segmentName)
+ .that(parsedStatement.toString())
+ .isEqualTo(ddlStatement);
+ } catch (ParseException e) {
+ fail(
+ "Failed to parse section: '"
+ + segmentName
+ + "': "
+ + e
+ + "\nStatement: "
+ + ddlStatement);
+ }
+ }
+ System.out.println("validateDDLfromFile - tests completed : " + tests.size());
+ }
+
+ @Test
+ public void validateUnsupportedDDLfromFile() throws Exception {
+
+ LinkedHashMap tests =
+ ReadTestDatafile.readDdlSegmentsFromFile("ddlParserUnsupported.txt");
+
+ Iterator> testIt = tests.entrySet().iterator();
+
+ String segmentName = "unread";
+ String ddlStatement = "unread";
+ while (testIt.hasNext()) {
+ Entry test = testIt.next();
+ segmentName = test.getKey();
+ // remove newlines, indentation and shrink all whitespace to a single space.
+ ddlStatement = test.getValue().replaceAll("\\s+", " ").trim();
+
+ try (StringReader in = new StringReader(ddlStatement)) {
+ DdlParser parser = new DdlParser(in);
+ parser.ddl_statement();
+
+ fail(
+ "UnsupportedOperationException not thrown for section '"
+ + segmentName
+ + "'\nStatement: "
+ + ddlStatement);
+ } catch (UnsupportedOperationException e) {
+ /* expected */ ;
+ } catch (ParseException e) {
+ fail(
+ "Failed to parse section: '"
+ + segmentName
+ + "': "
+ + e
+ + "\nStatement: "
+ + ddlStatement);
+ }
+ }
+ System.out.println("validateUnsupportedDDLfromFile - tests completed : " + tests.size());
+ }
+}
diff --git a/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserTest.java b/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserTest.java
index f22aff8..19863b6 100644
--- a/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserTest.java
+++ b/src/test/java/com/google/cloud/solutions/spannerddl/parser/DDLParserTest.java
@@ -30,53 +30,42 @@ public void parseCreateTable() throws ParseException {
ASTcreate_table_statement statement =
(ASTcreate_table_statement)
parse(
- "create table test.test ("
- + "boolcol bool, "
- + "intcol int64 not null, "
- + "floatcol float64, "
- + "`sizedstring` string(55), "
- + "maxstring string(max) NOT NULL DEFAULT (\"prefix\" || sizedstring || \"suffix\"), "
- + "sizedbytes bytes(55), "
- + "maxbytes bytes(max), "
- + "datecol date, "
- + "timestampcol timestamp options (allow_commit_timestamp = true), "
- + "intarray array, "
- + "numericcol numeric,"
- + "jsoncol json, "
- + "pgcolumn pg.something, "
- + "generatedcol string(max) as (sizedstring+ strstr(maxstring,strpos(maxstring,'xxx'),length(maxstring)) +2.0) STORED, "
- + "constraint fk_col_remote FOREIGN KEY(col1, col2) REFERENCES test.other_table(other_col1, other_col2) on delete cascade, "
- + "constraint fk_col_remote2 FOREIGN KEY(col1) REFERENCES test.other_table(other_col1) on delete no action, "
- + "constraint check_some_value CHECK ((length(sizedstring)>100 or sizedstring= \"xxx\") AND boolcol= true and intcol > -123.4 and numericcol < 1.5)"
- + ") "
- + "primary key (intcol ASC, floatcol desc, boolcol), "
- + "interleave in parent `other_table` on delete cascade,"
- + "row deletion policy (OLDER_THAN(timestampcol, INTERVAL 10 DAY))")
+ "create table test.test (boolcol bool, intcol int64 not null, floatcol float64,"
+ + " `sizedstring` string(55), maxstring string(max) NOT NULL DEFAULT"
+ + " (\"prefix\" || sizedstring || \"suffix\"), sizedbytes bytes(55),"
+ + " maxbytes bytes(max), datecol date, timestampcol timestamp options"
+ + " (allow_commit_timestamp = true), intarray array, numericcol"
+ + " numeric,jsoncol json, pgcolumn pg.something, generatedcol string(max)"
+ + " as (sizedstring+"
+ + " strstr(maxstring,strpos(maxstring,'xxx'),length(maxstring)) +2.0)"
+ + " STORED, constraint fk_col_remote FOREIGN KEY(col1, col2) REFERENCES"
+ + " test.other_table(other_col1, other_col2) on delete cascade, constraint"
+ + " fk_col_remote2 FOREIGN KEY(col1) REFERENCES"
+ + " test.other_table(other_col1) on delete no action, constraint"
+ + " check_some_value CHECK ((length(sizedstring)>100 or sizedstring="
+ + " \"xxx\") AND boolcol= true and intcol > -123.4 and numericcol < 1.5))"
+ + " primary key (intcol ASC, floatcol desc, boolcol), interleave in parent"
+ + " `other_table` on delete cascade,row deletion policy"
+ + " (OLDER_THAN(timestampcol, INTERVAL 10 DAY))")
.jjtGetChild(0);
assertThat(statement.toString())
.isEqualTo(
- "CREATE TABLE test.test ("
- + "boolcol BOOL, "
- + "intcol INT64 NOT NULL, "
- + "floatcol FLOAT64, "
- + "`sizedstring` STRING(55), "
- + "maxstring STRING(MAX) NOT NULL DEFAULT (\"prefix\" | | sizedstring | | \"suffix\"), "
- + "sizedbytes BYTES(55), "
- + "maxbytes BYTES(MAX), "
- + "datecol DATE, "
- + "timestampcol TIMESTAMP OPTIONS (allow_commit_timestamp=TRUE), "
- + "intarray ARRAY, "
- + "numericcol NUMERIC, "
- + "jsoncol JSON, "
- + "pgcolumn PG.SOMETHING, "
- + "generatedcol STRING(MAX) AS ( sizedstring + strstr ( maxstring, strpos ( maxstring, 'xxx' ), length ( maxstring ) ) + 2.0 ) STORED, "
- + "CONSTRAINT fk_col_remote FOREIGN KEY (col1, col2) REFERENCES test.other_table (other_col1, other_col2) ON DELETE CASCADE, "
- + "CONSTRAINT fk_col_remote2 FOREIGN KEY (col1) REFERENCES test.other_table (other_col1) ON DELETE NO ACTION, "
- + "CONSTRAINT check_some_value CHECK (( length ( sizedstring ) > 100 OR sizedstring = \"xxx\" ) AND boolcol = TRUE AND intcol > -123.4 AND numericcol < 1.5)"
- + ") PRIMARY KEY (intcol ASC, floatcol DESC, boolcol ASC), "
- + "INTERLEAVE IN PARENT `other_table` ON DELETE CASCADE, "
- + "ROW DELETION POLICY (OLDER_THAN ( timestampcol, INTERVAL 10 DAY ))");
+ "CREATE TABLE test.test (boolcol BOOL, intcol INT64 NOT NULL, floatcol FLOAT64,"
+ + " `sizedstring` STRING(55), maxstring STRING(MAX) NOT NULL DEFAULT (\"prefix\" |"
+ + " | sizedstring | | \"suffix\"), sizedbytes BYTES(55), maxbytes BYTES(MAX),"
+ + " datecol DATE, timestampcol TIMESTAMP OPTIONS (allow_commit_timestamp=TRUE),"
+ + " intarray ARRAY, numericcol NUMERIC, jsoncol JSON, pgcolumn PG.SOMETHING,"
+ + " generatedcol STRING(MAX) AS ( sizedstring + strstr ( maxstring, strpos ("
+ + " maxstring, 'xxx' ), length ( maxstring ) ) + 2.0 ) STORED, CONSTRAINT"
+ + " fk_col_remote FOREIGN KEY (col1, col2) REFERENCES test.other_table (other_col1,"
+ + " other_col2) ON DELETE CASCADE, CONSTRAINT fk_col_remote2 FOREIGN KEY (col1)"
+ + " REFERENCES test.other_table (other_col1) ON DELETE NO ACTION, CONSTRAINT"
+ + " check_some_value CHECK (( length ( sizedstring ) > 100 OR sizedstring = \"xxx\""
+ + " ) AND boolcol = TRUE AND intcol > -123.4 AND numericcol < 1.5)) PRIMARY KEY"
+ + " (intcol ASC, floatcol DESC, boolcol ASC), INTERLEAVE IN PARENT `other_table` ON"
+ + " DELETE CASCADE, ROW DELETION POLICY (OLDER_THAN ( timestampcol, INTERVAL 10 DAY"
+ + " ))");
// Test re-parse of toString output.
ASTcreate_table_statement statement2 =
@@ -125,63 +114,6 @@ public void parseDDLCreateIndexSyntaxError() {
parseCheckingException("Create index index1 on test1", "Was expecting one of:\n\n\"(\" ...");
}
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoDropTable() throws ParseException {
- parseAndVerifyToString("drop table test1");
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoDropIndex() throws ParseException {
- parseAndVerifyToString("drop index test1");
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoDropChangeStream() throws ParseException {
- parseAndVerifyToString("drop change stream test1");
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoCreateChangeStream() throws ParseException {
- parseAndVerifyToString("Create change stream test1 for test2");
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoCreateView() throws ParseException {
- parseAndVerifyToString("CREATE VIEW test1 SQL SECURITY INVOKER AS SELECT * from test2");
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoCreateorReplaceView() throws ParseException {
- parseAndVerifyToString(
- "CREATE OR REPLACE VIEW test1 SQL SECURITY INVOKER AS SELECT * from test2");
- }
-
- @Test
- public void parseDDLNoAlterTableRowDeletionPolicy() throws ParseException {
- parseAndVerifyToString(
- "ALTER TABLE Albums "
- + "ADD ROW DELETION POLICY (OLDER_THAN ( timestamp_column, INTERVAL 1 DAY ))");
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoAlterTableReplaceRowDeletionPolicy() throws ParseException {
- String DDL =
- "ALTER TABLE Albums "
- + "REPLACE ROW DELETION POLICY (OLDER_THAN(timestamp_column, INTERVAL 1 DAY))";
- parseAndVerifyToString(DDL);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void parseDDLNoDropRowDeletionPolicy() throws ParseException {
- parseAndVerifyToString("ALTER TABLE Albums DROP ROW DELETION POLICY;");
- }
-
- @Test
- public void parseDDLColDefaultValue() throws ParseException {
- parseAndVerifyToString(
- "CREATE TABLE test1 (keycol INT64, value INT64 DEFAULT (keycol * 100 + PI ( ))) PRIMARY KEY (keycol ASC)");
- }
-
@Test
public void parseAlterDatabase() throws ParseException {
parseAndVerifyToString("ALTER DATABASE dbname SET OPTIONS (opt1=NULL,opt2='1234',opt3=3)");
diff --git a/src/test/java/com/google/cloud/solutions/spannerddl/testUtils/ReadTestDatafile.java b/src/test/java/com/google/cloud/solutions/spannerddl/testUtils/ReadTestDatafile.java
new file mode 100644
index 0000000..1e187ce
--- /dev/null
+++ b/src/test/java/com/google/cloud/solutions/spannerddl/testUtils/ReadTestDatafile.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.solutions.spannerddl.testUtils;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.LinkedHashMap;
+
+public abstract class ReadTestDatafile {
+
+ /**
+ * Reads the test data file, parsing out the test titles and data from the file.
+ *
+ * @param filename
+ * @return LinkedHashMap of segment name => contents
+ * @throws IOException
+ */
+ public static LinkedHashMap readDdlSegmentsFromFile(String filename)
+ throws IOException {
+ File file = new File("src/test/resources/" + filename).getAbsoluteFile();
+ LinkedHashMap output = new LinkedHashMap<>();
+
+ try (BufferedReader in = new BufferedReader(new FileReader(file))) {
+
+ String sectionName = null;
+ StringBuilder section = new StringBuilder();
+ String line;
+ while (null != (line = in.readLine())) {
+ line = line.replaceAll("#.*", "").trim();
+ if (line.isEmpty()) {
+ continue;
+ }
+ if (line.startsWith("==")) {
+ // new section
+ if (sectionName != null) {
+ // add closed section.
+ output.put(sectionName, section.length() > 0 ? section.toString() : null);
+ }
+ sectionName = line;
+ section = new StringBuilder();
+ continue;
+ } else if (sectionName == null) {
+ throw new IOException("no section name before first statement");
+ }
+ section.append(line).append('\n');
+ }
+ // Check if there is an unclosed last section
+ if (section.length() > 0) {
+ // add last section
+ output.put(sectionName, section.length() > 0 ? section.toString() : null);
+ }
+ return output;
+ }
+ }
+}
diff --git a/src/test/resources/ddlParserUnsupported.txt b/src/test/resources/ddlParserUnsupported.txt
new file mode 100644
index 0000000..e192366
--- /dev/null
+++ b/src/test/resources/ddlParserUnsupported.txt
@@ -0,0 +1,50 @@
+###############################################################################
+# File containing test input output for DDL parser tests for unsupported
+# statements that should throw unsupported operation exceptions.
+#
+# Format:
+# '#' = line comment, skipped when file is read.
+# '== TEST NAME ' = section name/separator.
+# Tests are run on each section individually.
+# "TEST NAME" should be the same on all 3 DDL file
+#
+# Each test must have a single DDL statement that is parsed, and then
+# compared to the string rendering of the parsed statement.
+#
+# Note that leading spaces will be stripped and line breaks converted to spaces
+# to make this file a little more readable
+###############################################################################
+
+== Test 1
+
+ALTER TABLE Albums DROP ROW DELETION POLICY
+
+== Test 2
+
+ALTER TABLE Albums
+REPLACE ROW DELETION POLICY (OLDER_THAN(timestamp_column, INTERVAL 1 DAY))
+
+== Test 3
+
+CREATE OR REPLACE VIEW test1 SQL SECURITY INVOKER AS SELECT * from test2
+
+== Test 4
+
+CREATE VIEW test1 SQL SECURITY INVOKER AS SELECT * from test2
+
+== Test 5
+
+Create change stream test1 for test2
+
+== Test 6
+
+drop change stream test1
+
+== Test 7
+
+drop index test1
+
+== Test 8
+
+drop table test1
+
diff --git a/src/test/resources/ddlParserValidation.txt b/src/test/resources/ddlParserValidation.txt
new file mode 100644
index 0000000..62a14bb
--- /dev/null
+++ b/src/test/resources/ddlParserValidation.txt
@@ -0,0 +1,87 @@
+###############################################################################
+# File containing test input output for DDL parser/validator Tests.
+#
+# Format:
+# '#' = line comment, skipped when file is read.
+# '== TEST NAME ' = section name/separator.
+# Tests are run on each section individually.
+# "TEST NAME" should be the same on all 3 DDL file
+#
+# Each test must have a single DDL statement that is parsed, and then
+# compared to the string rendering of the parsed statement.
+#
+# Note that leading spaces will be stripped and line breaks converted to spaces
+# to make this file a little more readable
+###############################################################################
+
+== Test 1 Create table with col default value
+
+CREATE TABLE test1
+ (keycol INT64,
+ value INT64 DEFAULT (keycol * 100 + PI ( ))) PRIMARY KEY (keycol ASC)
+
+== Test 2
+
+ALTER DATABASE dbname SET OPTIONS (opt1=NULL,opt2='1234',opt3=3)
+
+== Test 3
+
+ALTER TABLE Albums
+ADD ROW DELETION POLICY (OLDER_THAN ( timestamp_column, INTERVAL 1 DAY ))
+
+== Test 4
+
+CREATE UNIQUE NULL_FILTERED INDEX testindex ON testtable
+(col1 ASC,
+ col2 DESC,
+ col3 ASC) STORING (col4, col5, col6),
+INTERLEAVE IN other_table
+
+== Test 5
+
+CREATE TABLE test.test
+(boolcol BOOL,
+ intcol INT64 NOT NULL,
+ floatcol FLOAT64,
+ `sizedstring` STRING(55),
+ maxstring STRING(MAX) NOT NULL DEFAULT ("prefix" | | sizedstring | | "suffix"),
+ sizedbytes BYTES(55),
+ maxbytes BYTES(MAX),
+ datecol DATE,
+ timestampcol TIMESTAMP OPTIONS (allow_commit_timestamp=TRUE),
+ intarray ARRAY,
+ numericcol NUMERIC,
+ jsoncol JSON,
+ pgcolumn PG.SOMETHING,
+ generatedcol STRING(MAX) AS ( sizedstring + strstr ( maxstring, strpos ( maxstring, 'xxx' ), length ( maxstring ) ) + 2.0 ) STORED,
+ CONSTRAINT fk_col_remote FOREIGN KEY (col1, col2) REFERENCES test.other_table (other_col1, other_col2) ON DELETE CASCADE,
+ CONSTRAINT fk_col_remote2 FOREIGN KEY (col1) REFERENCES test.other_table (other_col1) ON DELETE NO ACTION,
+ CONSTRAINT check_some_value CHECK (( length ( sizedstring ) > 100 OR sizedstring = "xxx" ) AND boolcol = TRUE AND intcol > -123.4 AND numericcol < 1.5))
+PRIMARY KEY (intcol ASC, floatcol DESC, boolcol ASC),
+INTERLEAVE IN PARENT `other_table` ON DELETE CASCADE,
+ROW DELETION POLICY (OLDER_THAN ( timestampcol, INTERVAL 10 DAY ))
+
+
+== Test 6 -- using quoted reserved words as column names
+
+CREATE TABLE mytable
+(`key` INT64,
+`index` STRING(MAX),
+`table` BYTES(MAX),
+generatedcol INT64 AS ( KEY * INDEX ) STORED,
+CONSTRAINT fk_col_remote2 FOREIGN KEY (`key`) REFERENCES test.other_table (`key`))
+PRIMARY KEY (`key` ASC)
+
+== Test 7 -- using unquoted reserved words as column names
+
+CREATE TABLE mytable
+(key INT64,
+index STRING(MAX),
+table BYTES(MAX),
+generatedcol INT64 AS ( KEY * INDEX ) STORED,
+CONSTRAINT fk_col_remote2 FOREIGN KEY (key) REFERENCES test.other_table (key))
+PRIMARY KEY (key ASC, index ASC)
+
+== Test 8 -- using unquoted reserved words as column names in index
+
+CREATE INDEX myIndex ON mytable (key ASC, index ASC) STORING (table)
diff --git a/src/test/resources/expectedDdlDiff.txt b/src/test/resources/expectedDdlDiff.txt
index d5a9dfa..4a26037 100644
--- a/src/test/resources/expectedDdlDiff.txt
+++ b/src/test/resources/expectedDdlDiff.txt
@@ -134,7 +134,7 @@ ALTER TABLE test1 DROP CONSTRAINT ch_in_alter
== TEST 21 add generated col in table
-ALTER TABLE test_gen ADD COLUMN col3 INT64 AS ( col1 * col2 * 2 ) STORED
+ALTER TABLE test_gen ADD COLUMN col3 INT64 AS ( col1 * col2 * 2 ) STORED
== TEST 22 drop generated col in table