diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml index df179ee1..910ba296 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/misc.yml @@ -28,6 +28,15 @@ jobs: - name: Check version update patch file run: python3 ci/version.py check + verify-antlr: + name: Verify ANTLR-generated code + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Check + working-directory: rs/antlr + run: python3 generate.py --ci + commitlint: name: Lint commits for semantic-release runs-on: ubuntu-latest diff --git a/Cargo.lock b/Cargo.lock index b292ac5a..d2ce6db9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,6 +32,23 @@ dependencies = [ "winapi", ] +[[package]] +name = "antlr-rust" +version = "0.3.0-beta" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfc6ab5594c6b2b7aa8719f4ecb785a268b2e0c2529042046035d5cebe9fa7d7" +dependencies = [ + "better_any", + "bit-set", + "byteorder", + "lazy_static", + "murmur3", + "once_cell", + "parking_lot 0.11.2", + "typed-arena", + "uuid", +] + [[package]] name = "anyhow" version = "1.0.55" @@ -61,6 +78,12 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" +[[package]] +name = "better_any" +version = "0.2.0-dev.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad0e12c5910bf40715b2204434ac301b4456bbf7327229fa89efbe6da0e1971" + [[package]] name = "bit-set" version = "0.5.2" @@ -88,6 +111,12 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72feb31ffc86498dacdbd0fcebb56138e7177a8cc5cea4516031d15ae85a742e" +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + [[package]] name = "bytes" version = "1.1.0" @@ -541,6 +570,15 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "murmur3" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a198f9589efc03f544388dfc4a19fe8af4323662b62f598b8dcfdac62c14771c" +dependencies = [ + "byteorder", +] + [[package]] name = "nom" version = "7.1.0" @@ -1092,6 +1130,7 @@ dependencies = [ name = "substrait-validator" version = "0.0.8" dependencies = [ + "antlr-rust", "base64", "chrono", "curl", @@ -1268,6 +1307,12 @@ dependencies = [ "serde", ] +[[package]] +name = "typed-arena" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae" + [[package]] name = "unicode-bidi" version = "0.3.7" diff --git a/proto/substrait/validator/validator.proto b/proto/substrait/validator/validator.proto index 98df4465..8c2e55e4 100644 --- a/proto/substrait/validator/validator.proto +++ b/proto/substrait/validator/validator.proto @@ -73,6 +73,10 @@ message Node { // represents the parse result of the referred file. string resolved_uri = 9; + // This node represents an abstract syntax tree node, used for representing + // complex YAML string parse results. + google.protobuf.Empty ast_node = 10; + // No longer used. The more generic ResolvedUri type is used instead. 
YamlReference yaml_reference = 5 [deprecated = true]; } diff --git a/rs/Cargo.toml b/rs/Cargo.toml index 4e1b475b..90c312ea 100644 --- a/rs/Cargo.toml +++ b/rs/Cargo.toml @@ -46,6 +46,9 @@ serde_json = "1" # being useful elsewhere too). regex = "1.5" +# Used for the type derivation DSL. +antlr-rust = "0.3.0-beta" + # Used for checking URI syntax. uriparse = "0.6" diff --git a/rs/antlr/.gitignore b/rs/antlr/.gitignore new file mode 100644 index 00000000..44a14c8d --- /dev/null +++ b/rs/antlr/.gitignore @@ -0,0 +1,2 @@ +antlr.jar +__pycache__ diff --git a/rs/antlr/README.md b/rs/antlr/README.md new file mode 100644 index 00000000..bf7d17eb --- /dev/null +++ b/rs/antlr/README.md @@ -0,0 +1,10 @@ +# ANTLR code generation logic + +The validator includes a parser for type expressions based on an ANTLR grammar. +Unfortunately, the ANTLR code generator is written in Java, and would thus add +a huge build dependency (a JRE) to the validator build environment. This is +especially problematic for the distribution of Cargo crates, which are +fundamentally source distributions that should not depend on anything other +than other Rust crates. Therefore, the generated files are checked in to git +and distributed with the crate, and regeneration must thus be done manually. +Call the generate.py script to do so. diff --git a/rs/antlr/antlr.py b/rs/antlr/antlr.py new file mode 100755 index 00000000..528eb510 --- /dev/null +++ b/rs/antlr/antlr.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 + +"""Wrapper script to download and run a suitable version of ANTLR for +generating or verifying the Rust bindings for a given grammar.""" + +import urllib.request +import os +import sys +import hashlib +import logging +import tempfile +import shutil +import filecmp +import subprocess +import difflib +import argparse + + +# NOTE: the Rust bindings for ANTLR are not (yet) official, so we need to +# download a forked ANTLR build. 
+ANTLR_URL = "https://github.com/rrevenantt/antlr4rust/releases/download/antlr4-4.8-2-Rust0.3.0-beta/antlr4-4.8-2-SNAPSHOT-complete.jar" +ANTLR_SHA1 = "775d24ac1ad5df1eb0ed0e802f0fb2a5aeace43c" + + +class Failure(Exception): + """Used for fatal errors.""" + + +def fail(msg): + """Logs and throws an error message.""" + logging.error(msg) + raise Failure(msg) + + +def download_file(fname, url): + """Downloads a file if it does not already exist.""" + if not os.path.isfile(fname): + logging.info(f"Downloading {fname}...") + urllib.request.urlretrieve(url, fname) + + +def verify_file_hash(fname, hash_str): + """Verifies the hash of a (downloaded) file.""" + logging.info(f"Verifying {fname}...") + with open(fname, "rb") as f: + file_hash = hashlib.sha1() + while chunk := f.read(8192): + file_hash.update(chunk) + actual = file_hash.hexdigest() + if hash_str != actual: + fail(f"Verification failed; hash should be {hash_str} but was {actual}") + + +def verify_file_identical(new, old): + """Verifies that two text files are identical, printing a diff if not.""" + logging.info(f"Verifying {new} against {old}...") + if not os.path.isfile(new): + fail(f"{new} does not exist") + if not os.path.isfile(old): + fail(f"{old} does not exist") + if not filecmp.cmp(new, old, shallow=False): + with open(new, "r") as f: + new_data = f.readlines() + with open(old, "r") as f: + old_data = f.readlines() + sys.stdout.writelines(difflib.unified_diff(old_data, new_data, old, new)) + fail(f"{new} is different, see diff") + + +def run_antlr(antlr, grammar, output_dir, verify=False, java="java"): + """Runs the given ANTLR JAR on the given grammar, sending outputs to + output_dir. If verify is set, instead of copying the newly-generated files, + this checks that there are no differences between the newly and previously + generated files.""" + logging.info("Running ANTLR...") + + # Determine the names of the generated files that we're interested in. + name = os.path.basename(grammar).split(".")[0].lower() + expected_files = [f"{name}lexer.rs", f"{name}parser.rs", f"{name}listener.rs"] + + # Run in a temporary directory, because ANTLR spams random files we didn't + # ask for in its working directory.
+ with tempfile.TemporaryDirectory() as generate_dir: + shutil.copyfile(grammar, os.path.join(generate_dir, os.path.basename(grammar))) + subprocess.run( + [ + java, + "-jar", + os.path.realpath(antlr), + "-Dlanguage=Rust", + os.path.basename(grammar), + ], + cwd=generate_dir, + ) + + logging.info("Copying/verifying output files...") + for expected_file in expected_files: + src = os.path.join(generate_dir, expected_file) + dest = os.path.join(output_dir, expected_file) + if not os.path.isfile(src): + fail(f"ANTLR failed to generate {expected_file}") + with open(src, "r+") as f: + data = f.read() + data = ( + "// SPDX-License-Identifier: Apache-2.0\n" + "#![allow(clippy::all)]\n" + "#![cfg_attr(rustfmt, rustfmt_skip)]\n" + f"{data}" + ) + f.seek(0) + f.write(data) + if verify: + verify_file_identical(src, dest) + else: + if os.path.exists(dest): + os.unlink(dest) + shutil.copyfile(src, dest) + + +def main(*args): + """Utility to generate Rust bindings for an ANTLR grammar.""" + parser = argparse.ArgumentParser(description=main.__doc__) + parser.add_argument( + "--antlr", + metavar="antlr.jar", + default=os.path.join(os.path.dirname(os.path.realpath(__file__)), "antlr.jar"), + help="alternate location for the ANTLR jar", + ) + parser.add_argument( + "--no-download", + action="store_true", + help="don't attempt to download the ANTLR jar", + ) + parser.add_argument( + "--no-verify", + action="store_true", + help="don't attempt to verify the hash of the ANTLR jar", + ) + parser.add_argument( + "--java", default="java", help="path to java executable to call ANTLR with" + ) + parser.add_argument( + "--ci-check", + action="store_true", + help="instead of regenerating the files, assert that the files do not need to be regenerated", + ) + parser.add_argument("grammar", help="the .g4 grammar file to generate") + parser.add_argument( + "dest_dir", default=".", nargs="?", help="where to copy the generated files to" + ) + args = parser.parse_args(args) + + logging.basicConfig(level=logging.INFO) + + # Acquire ANTLR jar. + if args.no_download: + if not os.path.isfile(args.antlr): + parser.error(f"{args.antlr} does not exist and auto-download is disabled") + else: + download_file(args.antlr, ANTLR_URL) + if not args.no_verify: + verify_file_hash(args.antlr, ANTLR_SHA1) + + # Run ANTLR. 
+ if not os.path.isfile(args.grammar): + parser.error(f"{args.grammar} does not exist") + run_antlr( + args.antlr, args.grammar, args.dest_dir, verify=args.ci_check, java=args.java + ) + + +if __name__ == "__main__": + try: + main(*sys.argv[1:]) + logging.info("Done") + except Failure: + logging.info("Returning failure exit status") + sys.exit(1) diff --git a/rs/antlr/generate.py b/rs/antlr/generate.py new file mode 100755 index 00000000..d4446bfe --- /dev/null +++ b/rs/antlr/generate.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 + +import sys +import logging +import argparse +import antlr + +"""Script for regenerating or verifying all the ANTLR-generated files of the +validator.""" + + +def main(*args): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--ci", + action="store_true", + help="instead of regenerating, verify that the files don't need to be regenerated", + ) + args = parser.parse_args(args) + + logging.basicConfig(level=logging.INFO) + + ci = ["--ci-check"] if args.ci else [] + antlr.main( + "../src/parse/extensions/simple/derivations/SubstraitType.g4", + "../src/parse/extensions/simple/derivations", + *ci, + ) + + +if __name__ == "__main__": + try: + main(*sys.argv[1:]) + logging.info("Done") + except antlr.Failure: + logging.info("Returning failure exit status") + sys.exit(1) diff --git a/rs/src/export/html/mod.rs b/rs/src/export/html/mod.rs index c0d7538d..fd1ae12f 100644 --- a/rs/src/export/html/mod.rs +++ b/rs/src/export/html/mod.rs @@ -524,6 +524,7 @@ fn format_node_tree( tree::NodeType::YamlMap => format!("{brief} {}", format_span("type", "YAML map")), tree::NodeType::YamlArray => format!("{brief} {}", format_span("type", "YAML array")), tree::NodeType::YamlPrimitive(data) => format!("= {}{brief}", format_span("value", data)), + tree::NodeType::AstNode => format!("{brief} {}", format_span("type", "AST node")), }; let header = format!( "{} {value} {}", diff --git a/rs/src/export/proto.rs b/rs/src/export/proto.rs index 0f3f953b..edaed2d6 100644 --- a/rs/src/export/proto.rs +++ b/rs/src/export/proto.rs @@ -188,6 +188,7 @@ impl From<&tree::NodeType> for validator::node::NodeType { validator::node::NodeType::YamlPrimitive(data.into()) } tree::NodeType::ResolvedUri(uri) => validator::node::NodeType::ResolvedUri(uri.clone()), + tree::NodeType::AstNode => validator::node::NodeType::AstNode(()), } } } diff --git a/rs/src/output/diagnostic.rs b/rs/src/output/diagnostic.rs index 87eb4ec9..a52e38ad 100644 --- a/rs/src/output/diagnostic.rs +++ b/rs/src/output/diagnostic.rs @@ -274,6 +274,34 @@ pub enum Classification { #[strum(props(Description = "mismatched nullability"))] TypeMismatchedNullability = 4008, + #[strum(props(Description = "invalid type pattern or derivation expression"))] + TypeDerivationInvalid = 4009, + + // Note the difference between above and below! Above should be used when + // the derivation itself is invalid due to syntax or metatype errors, or in + // other words, when it could *never* match or evaluate, regardless of + // context. Below is used when the derivation itself appears to be sane, + // but it does not apply to the given context. From a user perspective, + // above means that the YAML is wrong, while below means that a function + // is used incorrectly in a plan. Note that we cannot detect all problems + // with type derivation expressions without evaluating them because they + // are dynamically typed. 
+ #[strum(props( + Description = "type pattern or derivation expression failed to match or evaluate" + ))] + TypeDerivationFailed = 4010, + + #[strum(props(Description = "parse error in type pattern or derivation expression"))] + TypeParseError = 4011, + + #[strum(props( + Description = "name resolution error in type pattern or derivation expression" + ))] + TypeResolutionError = 4012, + + #[strum(props(Description = "invalid field name"))] + TypeInvalidFieldName = 4013, + // Relation-related diagnostics (group 5). #[strum(props(HiddenDescription = "relation-related diagnostics"))] Relation = 5000, @@ -330,28 +358,8 @@ pub enum Classification { #[strum(props(Description = "redundant field"))] RedundantField = 7007, - // Type derivations (group 8). - #[strum(props( - HiddenDescription = "diagnostics for type derivation patterns and expressions" - ))] - Derivation = 8000, - - #[strum(props(Description = "invalid type derivation pattern or expression"))] - DerivationInvalid = 8001, - - // Note the difference between above and below! Above should be used when - // the derivation itself is invalid due to syntax or metatype errors, or in - // other words, when it could *never* match or evaluate, regardless of - // context. Below is used when the derivation itself appears to be sane, - // but it does not apply to the given context. From a user perspective, - // above means that the YAML is wrong, while below means that a function - // is used incorrectly in a plan. Note that we cannot detect all problems - // with type derivation expressions without evaluating them because they - // are dynamically typed. - #[strum(props( - Description = "type derivation pattern or expression failed to match or evaluate" - ))] - DerivationFailed = 8002, + #[strum(props(Description = "redundant enum variant"))] + RedundantEnumVariant = 7008, } impl Default for Classification { diff --git a/rs/src/output/extension/namespace.rs b/rs/src/output/extension/namespace.rs index 586ea311..49adb8bd 100644 --- a/rs/src/output/extension/namespace.rs +++ b/rs/src/output/extension/namespace.rs @@ -209,17 +209,24 @@ impl Definition { /// Resolves a name to all items with the same name visible from within this /// namespace (so, including private items). - pub fn resolve_local>(&self, name: S) -> ResolutionResult { - let name = name.as_ref(); - let reference = name.to_string().into(); + pub fn resolve_local(&self, reference: R) -> ResolutionResult + where + R: Into>, + { + let reference = reference.into(); + let name = reference.name.name().unwrap_or("!").to_string(); let mut result = ResolutionResult::new(reference); - self.resolve_internal(&mut result, true, None, name, true); + self.resolve_internal(&mut result, true, None, &name, true); result } /// Resolves a name to all items with the same name visible from outside /// this namespace (so, excluding private items). - pub fn resolve_public(&self, reference: extension::reference::Data) -> ResolutionResult { + pub fn resolve_public(&self, reference: R) -> ResolutionResult + where + R: Into>, + { + let reference = reference.into(); let name = reference.name.name().unwrap_or("!").to_string(); let mut result = ResolutionResult::new(reference); self.resolve_internal(&mut result, false, None, &name, true); @@ -345,7 +352,8 @@ impl std::fmt::Display for ResolutionResult { impl ResolutionResult { /// Creates a new, empty resolution result for the given unresolved - /// reference, to be used as a placeholder if . 
It will behave as if resolution failed because the namespace + /// reference, to be used as a placeholder when no namespace is actually + /// available. It will behave as if resolution failed because the namespace /// being looked in was itself not resolved. If an item is passed via the /// reference, it will be returned by as_item(). as_namespace() will return /// None. @@ -399,6 +407,7 @@ impl ResolutionResult { if_not_applicable: F1, if_ambiguous: F2, allow_ambiguity: bool, + optional: bool, ) -> Self where F1: FnOnce(String, &mut context::Context) -> bool, @@ -420,6 +429,10 @@ impl ResolutionResult { ), ) } + } else if self.visible_incomplete || optional { + // A visible namespace was not resolved (in which case we + // optimistically assume the item exists) or the item doesn't need + // to exist. } else if !self.invisible.is_empty() { traversal::push_diagnostic( parse_context, @@ -436,11 +449,11 @@ impl ResolutionResult { diagnostic::Level::Error, cause!( LinkUnresolvedName, - "a definition for {} may exists, but would not be visible from here", + "a definition for {} may exist, but would not be visible from here", self.unresolved_reference ), ); - } else if self.visible.first().is_some() || self.filtered { + } else if self.filtered { if !if_not_applicable(self.unresolved_reference.to_string(), parse_context) { traversal::push_diagnostic( parse_context, @@ -482,7 +495,23 @@ impl ResolutionResult { F1: FnOnce(String, &mut context::Context) -> bool, F2: FnOnce(String, &mut context::Context) -> bool, { - self.expect(parse_context, if_not_applicable, if_ambiguous, false) + self.expect(parse_context, if_not_applicable, if_ambiguous, false, false) + } + + /// Expects zero or one item(s), yielding diagnostics if this isn't the + /// case. If ambiguous, the specified function is called. It receives the + /// reference name and the parse context as arguments to form a suitable + /// diagnostic message. If it returns false, a default diagnostic message + /// will be emitted instead. + pub fn expect_not_ambiguous( + self, + parse_context: &mut context::Context, + if_ambiguous: F, + ) -> Self + where + F: FnOnce(String, &mut context::Context) -> bool, + { + self.expect(parse_context, |_, _| true, if_ambiguous, false, true) } /// Expects a one or more items, yielding diagnostics if this isn't the @@ -499,7 +528,7 @@ impl ResolutionResult { where F: FnOnce(String, &mut context::Context) -> bool, { - self.expect(parse_context, if_not_applicable, |_, _| true, true) + self.expect(parse_context, if_not_applicable, |_, _| true, true, false) } /// Silently returns the first matching item, if any. If there are none, @@ -514,6 +543,20 @@ impl ResolutionResult { Arc::new(data) } + /// Silently returns the first matching item, if any. Unlike as_item(), + /// this returns None if there are no matches. + pub fn as_opt_item(&self) -> Option> { + self.visible + .iter() + .filter_map(|x| x.1.as_item()) + .next() + .map(|item| { + let mut data = self.unresolved_reference.clone(); + data.definition.replace(item); + Arc::new(data) + }) + } + /// Silently returns the first matching namespace. Use /// filter_namespaces().expect_one() to formulate error messages if there /// are multiple or no namespaces available. 
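Note on the namespace changes above: together with the `From<&str>` conversions added to `extension/reference.rs` below, the widened `resolve_local()`/`resolve_public()` signatures and the new `expect_not_ambiguous()`/`as_opt_item()` helpers are aimed at optional lookups that should only complain about ambiguity. The fragment below is a rough sketch of the intended call-site flow, not code from this patch; `ns`, `ctx`, and the looked-up name are placeholders, and type parameters are elided just as they are in the hunks above.

```rust
// Illustrative fragment, not a verbatim call site from this change.
let result = ns
    .resolve_local("fp64") // &str converts into an unresolved reference
    .expect_not_ambiguous(ctx, |_name, _ctx| {
        // Returning false falls back to the default "ambiguous" diagnostic.
        false
    });
// Optional semantics: unlike as_item(), as_opt_item() yields None when
// nothing matched instead of returning the unresolved reference itself.
if let Some(item) = result.as_opt_item() {
    let _ = item; // use the resolved definition here
}
```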
diff --git a/rs/src/output/extension/reference.rs b/rs/src/output/extension/reference.rs index 397eb02f..0a5fb86a 100644 --- a/rs/src/output/extension/reference.rs +++ b/rs/src/output/extension/reference.rs @@ -54,6 +54,15 @@ impl From for Identifier { } } +impl From<&str> for Identifier { + fn from(name: &str) -> Self { + Identifier { + name: Some(name.to_string()), + anchor_path: None, + } + } +} + impl Identifier { /// Create a new anchor-based reference. pub fn new( @@ -160,6 +169,16 @@ impl From for Data { } } +impl From<&str> for Data { + fn from(name: &str) -> Self { + Data { + name: Arc::new(name.into()), + uri: Arc::default(), + definition: None, + } + } +} + /// References are stored in Arcs, so they can be (somewhat) efficiently /// copied. pub type Reference = Arc>; diff --git a/rs/src/output/extension/simple/function.rs b/rs/src/output/extension/simple/function.rs index 95565871..83c535f1 100644 --- a/rs/src/output/extension/simple/function.rs +++ b/rs/src/output/extension/simple/function.rs @@ -169,7 +169,10 @@ pub enum NullabilityHandling { /// for its arguments. If and only if none of the arguments are nullable, /// will output types be non-nullable. This is captured in the patterns by /// replacing all top-level nullability specifiers with an inconsistent - /// binding named with something not yet used anywhere. + /// binding named with something not yet used for anything else. Toplevel + /// bindings that were not yet overriding nullability are furthermore + /// promoted to bindings that do override nullability, using the same + /// inconsistent binding for the nullability specifier. Mirror, /// Specifies that a function can capture any combination of nullability diff --git a/rs/src/output/extension/simple/module.rs b/rs/src/output/extension/simple/module.rs index ffd5f548..52cdd931 100644 --- a/rs/src/output/extension/simple/module.rs +++ b/rs/src/output/extension/simple/module.rs @@ -5,6 +5,72 @@ use crate::output::extension; use std::collections::HashMap; +/// Trait for structs that represent extension modules, providing functions for +/// resolving names in them. +pub trait Scope { + /// Resolves a to-be-resolved reference to a type class. + fn resolve_type_class(&self, name: T) -> extension::simple::type_class::ResolutionResult + where + T: Into; + + /// Resolves a to-be-resolved reference to a type variation. + fn resolve_type_variation( + &self, + name: T, + ) -> extension::simple::type_variation::ResolutionResult + where + T: Into; + + /// Resolves a to-be-resolved reference to a function. + fn resolve_function(&self, name: T) -> extension::simple::function::ResolutionResult + where + T: Into; +} + +/// Same as [Scope], but object-safe. +pub trait DynScope { + /// Resolves a to-be-resolved reference to a type class. + fn resolve_type_class_from_ref( + &self, + name: extension::simple::type_class::UnresolvedReference, + ) -> extension::simple::type_class::ResolutionResult; + + /// Resolves a to-be-resolved reference to a type variation. + fn resolve_type_variation_from_ref( + &self, + name: extension::simple::type_variation::UnresolvedReference, + ) -> extension::simple::type_variation::ResolutionResult; + + /// Resolves a to-be-resolved reference to a function. 
+ fn resolve_function_from_ref( + &self, + name: extension::simple::function::UnresolvedReference, + ) -> extension::simple::function::ResolutionResult; +} + +impl DynScope for T { + fn resolve_type_class_from_ref( + &self, + name: extension::simple::type_class::UnresolvedReference, + ) -> extension::simple::type_class::ResolutionResult { + self.resolve_type_class(name) + } + + fn resolve_type_variation_from_ref( + &self, + name: extension::simple::type_variation::UnresolvedReference, + ) -> extension::simple::type_variation::ResolutionResult { + self.resolve_type_variation(name) + } + + fn resolve_function_from_ref( + &self, + name: extension::simple::function::UnresolvedReference, + ) -> extension::simple::function::ResolutionResult { + self.resolve_function(name) + } +} + /// A parsed simple extension module/file. #[derive(Clone, Debug, Default)] pub struct Definition { @@ -39,32 +105,76 @@ pub struct Definition { pub function_impls: extension::simple::function::NamespaceReference, } -impl Definition { +impl Scope for Definition { /// Resolves a to-be-resolved reference to a type class. - pub fn resolve_type_class( - &self, - name: extension::simple::type_class::UnresolvedReference, - ) -> extension::simple::type_class::ResolutionResult { - self.type_classes.resolve_public(name) + fn resolve_type_class(&self, name: T) -> extension::simple::type_class::ResolutionResult + where + T: Into, + { + self.type_classes.resolve_public(name.into()) } /// Resolves a to-be-resolved reference to a type variation. - pub fn resolve_type_variation( + fn resolve_type_variation( &self, - name: extension::simple::type_variation::UnresolvedReference, - ) -> extension::simple::type_variation::ResolutionResult { - self.type_variations.resolve_public(name) + name: T, + ) -> extension::simple::type_variation::ResolutionResult + where + T: Into, + { + self.type_variations.resolve_public(name.into()) } /// Resolves a to-be-resolved reference to a function. - pub fn resolve_function( - &self, - name: extension::simple::function::UnresolvedReference, - ) -> extension::simple::function::ResolutionResult { - self.function_impls.resolve_public(name) + fn resolve_function(&self, name: T) -> extension::simple::function::ResolutionResult + where + T: Into, + { + self.function_impls.resolve_public(name.into()) } } /// A potentially unresolved reference to a module. Includes the URI even if /// unresolved. The name fields of ExtensionReference are unused. pub type Reference = extension::Reference; + +impl Scope for Reference { + /// Resolves a to-be-resolved reference to a type class. + fn resolve_type_class(&self, name: T) -> extension::simple::type_class::ResolutionResult + where + T: Into, + { + let reference = name.into(); + self.definition + .as_ref() + .map(|x| x.resolve_type_class(reference.clone())) + .unwrap_or_else(|| extension::simple::type_class::ResolutionResult::new(reference)) + } + + /// Resolves a to-be-resolved reference to a type variation. + fn resolve_type_variation( + &self, + name: T, + ) -> extension::simple::type_variation::ResolutionResult + where + T: Into, + { + let reference = name.into(); + self.definition + .as_ref() + .map(|x| x.resolve_type_variation(reference.clone())) + .unwrap_or_else(|| extension::simple::type_variation::ResolutionResult::new(reference)) + } + + /// Resolves a to-be-resolved reference to a function. 
+ fn resolve_function(&self, name: T) -> extension::simple::function::ResolutionResult + where + T: Into, + { + let reference = name.into(); + self.definition + .as_ref() + .map(|x| x.resolve_function(reference.clone())) + .unwrap_or_else(|| extension::simple::function::ResolutionResult::new(reference)) + } +} diff --git a/rs/src/output/extension/simple/type_class.rs b/rs/src/output/extension/simple/type_class.rs index fb1d194c..b1ed3385 100644 --- a/rs/src/output/extension/simple/type_class.rs +++ b/rs/src/output/extension/simple/type_class.rs @@ -112,7 +112,7 @@ impl ParameterInfo for Definition { )); } if let Some(value) = ¶m.value { - if !slot.pattern.match_pattern(value) { + if !slot.pattern.match_pattern(value)? { return Err(cause!( TypeMismatchedParameters, "parameter {} does not match pattern {}", diff --git a/rs/src/output/tree.rs b/rs/src/output/tree.rs index 09568fec..17d5adc1 100644 --- a/rs/src/output/tree.rs +++ b/rs/src/output/tree.rs @@ -210,6 +210,10 @@ pub enum NodeType { /// have a child named "data" with the validation tree of the resolved /// data. ResolvedUri(String), + + /// The associated node represents a node in an abstract syntax tree parsed + /// from a string. + AstNode, } /// Semantical information about a node. diff --git a/rs/src/output/type_system/data/class.rs b/rs/src/output/type_system/data/class.rs index 33c88b9f..f5898e36 100644 --- a/rs/src/output/type_system/data/class.rs +++ b/rs/src/output/type_system/data/class.rs @@ -9,7 +9,9 @@ use crate::output::extension; use crate::output::type_system::data; use crate::output::type_system::meta; use std::collections::HashSet; -use strum_macros::{Display, EnumString}; +use strum_macros::Display; +use strum_macros::EnumIter; +use strum_macros::EnumString; /// Trait for checking the type parameters for a type class. pub trait ParameterInfo { @@ -116,7 +118,7 @@ impl Class { } /// Enumeration of simple types defined by Substrait. -#[derive(Clone, Debug, PartialEq, Eq, Hash, Display, EnumString)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Display, EnumString, EnumIter)] #[strum(ascii_case_insensitive, serialize_all = "snake_case")] pub enum Simple { Boolean, @@ -138,7 +140,7 @@ pub enum Simple { } /// Enumeration of compound types defined by Substrait. -#[derive(Clone, Debug, PartialEq, Eq, Hash, Display, EnumString)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Display, EnumString, EnumIter)] #[strum(ascii_case_insensitive, serialize_all = "UPPERCASE")] pub enum Compound { FixedChar, diff --git a/rs/src/output/type_system/data/type.rs b/rs/src/output/type_system/data/type.rs index eb196626..5726398a 100644 --- a/rs/src/output/type_system/data/type.rs +++ b/rs/src/output/type_system/data/type.rs @@ -115,6 +115,17 @@ impl Definition { }) } + /// Returns a variant of this type with the nullability overridden as + /// specified. + pub fn override_nullable(&self, nullable: bool) -> Arc { + Arc::new(Definition { + class: self.class.clone(), + nullable, + variation: self.variation.clone(), + parameters: self.parameters.clone(), + }) + } + /// Returns the type class. 
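Aside on the `Scope`/`DynScope` pair introduced in `rs/src/output/extension/simple/module.rs` above: `Scope`'s generic `Into<...>` parameters make it non-object-safe, while the `_from_ref` variants can live behind a trait object, and the blanket impl forwards one to the other. The sketch below is hedged: the helper name and the `modules` slice are hypothetical, and it assumes the per-kind `ResolutionResult` aliases are the namespace resolution result shown earlier.

```rust
use crate::output::extension;

// Hypothetical helper: search several modules through trait objects, which
// would not compile against the generic `Scope` trait itself.
fn find_type_class(
    modules: &[Box<dyn extension::simple::module::DynScope>],
    name: extension::simple::type_class::UnresolvedReference,
) -> Option<extension::simple::type_class::ResolutionResult> {
    modules
        .iter()
        .map(|module| module.resolve_type_class_from_ref(name.clone()))
        .find(|result| result.as_opt_item().is_some())
}
```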
pub fn class(&self) -> &data::Class { &self.class diff --git a/rs/src/output/type_system/meta/context.rs b/rs/src/output/type_system/meta/context.rs index d0dbba93..86275576 100644 --- a/rs/src/output/type_system/meta/context.rs +++ b/rs/src/output/type_system/meta/context.rs @@ -9,6 +9,7 @@ use crate::output::type_system::meta; #[derive(Clone, Debug, Default)] pub struct Context { /// Named bindings that have been previously assigned or matched via - /// patterns. + /// patterns. The keys are stored in lower-case for case-insensitive + /// matching. pub bindings: std::collections::HashMap, } diff --git a/rs/src/output/type_system/meta/function.rs b/rs/src/output/type_system/meta/function.rs index 9107b7e2..9cc092ff 100644 --- a/rs/src/output/type_system/meta/function.rs +++ b/rs/src/output/type_system/meta/function.rs @@ -4,14 +4,18 @@ use crate::output::diagnostic; use crate::output::type_system::meta; -use crate::util; -use crate::util::string::Describe; use super::Pattern; /// A function that operates on zero or more values. -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, strum_macros::Display, strum_macros::EnumString)] +#[strum(serialize_all = "snake_case")] pub enum Function { + /// Used for unknown functions. Takes any number of arguments, doesn't + /// evaluate them, and yields an unresolved value. + #[strum(serialize = "!")] + Unresolved, + /// Boolean not: `not(metabool) -> metabool` Not, @@ -72,38 +76,9 @@ pub enum Function { IfThenElse, } -impl Describe for Function { - fn describe( - &self, - f: &mut std::fmt::Formatter<'_>, - _limit: util::string::Limit, - ) -> std::fmt::Result { - match self { - Function::Not => write!(f, "not"), - Function::And => write!(f, "and"), - Function::Or => write!(f, "or"), - Function::Negate => write!(f, "negate"), - Function::Add => write!(f, "add"), - Function::Subtract => write!(f, "subtract"), - Function::Multiply => write!(f, "multiply"), - Function::Divide => write!(f, "divide"), - Function::Min => write!(f, "min"), - Function::Max => write!(f, "max"), - Function::Equal => write!(f, "equal"), - Function::NotEqual => write!(f, "not_equal"), - Function::GreaterThan => write!(f, "greater_than"), - Function::LessThan => write!(f, "less_than"), - Function::GreaterEqual => write!(f, "greater_equal"), - Function::LessEqual => write!(f, "less_equal"), - Function::Covers => write!(f, "covers"), - Function::IfThenElse => write!(f, "if_then_else"), - } - } -} - -impl std::fmt::Display for Function { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.display().fmt(f) +impl Default for Function { + fn default() -> Self { + Function::Unresolved } } @@ -115,17 +90,18 @@ impl Function { args: &[meta::pattern::Value], ) -> diagnostic::Result { match self { + Function::Unresolved => Ok(meta::Value::Unresolved), Function::Not => { if args.len() != 1 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects a single argument" )) } else if let Some(value) = args[0].evaluate_with_context(context)?.get_boolean() { Ok((!value).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metabools" )) } @@ -137,7 +113,7 @@ impl Function { Some(false) => return Ok(false.into()), None => { return Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metabools" )) } @@ -152,7 +128,7 @@ impl Function { Some(true) => return Ok(true.into()), None => { return Err(cause!( - DerivationInvalid, + 
TypeDerivationInvalid, "{self}() can only be applied to metabools" )) } @@ -163,18 +139,18 @@ impl Function { Function::Negate => { if args.len() != 1 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects a single argument" )) } else if let Some(value) = args[0].evaluate_with_context(context)?.get_integer() { if let Some(value) = value.checked_neg() { Ok(value.into()) } else { - Err(cause!(DerivationFailed, "integer overflow in {self}()")) + Err(cause!(TypeDerivationFailed, "integer overflow in {self}()")) } } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -184,11 +160,11 @@ impl Function { for arg in args.iter() { if let Some(value) = arg.evaluate_with_context(context)?.get_integer() { accumulator = accumulator.checked_add(value).ok_or_else(|| { - cause!(DerivationFailed, "integer overflow in {self}()") + cause!(TypeDerivationFailed, "integer overflow in {self}()") })?; } else { return Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )); } @@ -198,7 +174,7 @@ impl Function { Function::Subtract => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else if let (Some(lhs), Some(rhs)) = ( @@ -208,11 +184,11 @@ impl Function { if let Some(value) = lhs.checked_sub(rhs) { Ok(value.into()) } else { - Err(cause!(DerivationFailed, "integer overflow in {self}()")) + Err(cause!(TypeDerivationFailed, "integer overflow in {self}()")) } } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -222,11 +198,11 @@ impl Function { for arg in args.iter() { if let Some(value) = arg.evaluate_with_context(context)?.get_integer() { accumulator = accumulator.checked_mul(value).ok_or_else(|| { - cause!(DerivationFailed, "integer overflow in {self}()") + cause!(TypeDerivationFailed, "integer overflow in {self}()") })?; } else { return Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )); } @@ -236,7 +212,7 @@ impl Function { Function::Divide => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else if let (Some(lhs), Some(rhs)) = ( @@ -246,11 +222,11 @@ impl Function { if let Some(value) = lhs.checked_div(rhs) { Ok(value.into()) } else { - Err(cause!(DerivationFailed, "division by zero in {self}()")) + Err(cause!(TypeDerivationFailed, "division by zero in {self}()")) } } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -258,7 +234,7 @@ impl Function { Function::Min => { if args.is_empty() { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects at least one argument" )) } else { @@ -270,7 +246,7 @@ impl Function { } } else { return Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )); } @@ -281,7 +257,7 @@ impl Function { Function::Max => { if args.is_empty() { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects at least one argument" )) } else { @@ -293,7 +269,7 @@ impl Function { } } else { return Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )); } @@ -304,7 +280,7 @@ impl Function { Function::Equal => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() 
expects exactly two arguments" )) } else { @@ -316,7 +292,7 @@ impl Function { Function::NotEqual => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else { @@ -328,7 +304,7 @@ impl Function { Function::GreaterThan => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else if let (Some(lhs), Some(rhs)) = ( @@ -338,7 +314,7 @@ impl Function { Ok((lhs > rhs).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -346,7 +322,7 @@ impl Function { Function::LessThan => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else if let (Some(lhs), Some(rhs)) = ( @@ -356,7 +332,7 @@ impl Function { Ok((lhs < rhs).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -364,7 +340,7 @@ impl Function { Function::GreaterEqual => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else if let (Some(lhs), Some(rhs)) = ( @@ -374,7 +350,7 @@ impl Function { Ok((lhs >= rhs).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -382,7 +358,7 @@ impl Function { Function::LessEqual => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else if let (Some(lhs), Some(rhs)) = ( @@ -392,7 +368,7 @@ impl Function { Ok((lhs <= rhs).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() can only be applied to metaints" )) } @@ -400,21 +376,31 @@ impl Function { Function::Covers => { if args.len() != 2 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly two arguments" )) } else { let value = args[0].evaluate_with_context(context)?; + // It's possible for a match to capture bindings even if it + // fails in the end, in case of a partial match. However, a + // failing covers() call should not capture anything. So we + // have to make a copy of the context here. let mut context_copy = context.clone(); - Ok(args[1] - .match_pattern_with_context(&mut context_copy, &value) - .into()) + Ok( + if args[1].match_pattern_with_context(&mut context_copy, &value)? 
{ + *context = context_copy; + true + } else { + false + } + .into(), + ) } } Function::IfThenElse => { if args.len() != 3 { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "{self}() expects exactly three arguments" )) } else if let Some(condition) = @@ -423,7 +409,7 @@ impl Function { args[if condition { 1 } else { 2 }].evaluate_with_context(context) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "the first argument of {self}() must be a metabool" )) } diff --git a/rs/src/output/type_system/meta/pattern.rs b/rs/src/output/type_system/meta/pattern.rs index 88326527..a9fd335a 100644 --- a/rs/src/output/type_system/meta/pattern.rs +++ b/rs/src/output/type_system/meta/pattern.rs @@ -8,6 +8,7 @@ use crate::output::diagnostic; use crate::output::type_system::data; use crate::output::type_system::meta; use crate::util; +use crate::util::string::describe_identifier; use crate::util::string::Describe; use std::fmt::Write; use std::sync::Arc; @@ -22,13 +23,17 @@ pub trait Pattern { fn exactly(value: Self::Value) -> Self; /// Matches this pattern without any provided context. - fn match_pattern(&self, value: &Self::Value) -> bool { + fn match_pattern(&self, value: &Self::Value) -> diagnostic::Result { let mut context = meta::Context::default(); self.match_pattern_with_context(&mut context, value) } /// Matches this pattern with a provided context. - fn match_pattern_with_context(&self, context: &mut meta::Context, value: &Self::Value) -> bool; + fn match_pattern_with_context( + &self, + context: &mut meta::Context, + value: &Self::Value, + ) -> diagnostic::Result; /// Evaluates this pattern without any provided context. fn evaluate(&self) -> diagnostic::Result { @@ -52,28 +57,17 @@ pub trait Pattern { /// [meta::Value]. #[derive(Clone, Debug, PartialEq, Eq)] pub enum Value { - /// Accepts any meta::Value. Syntax: `?`. Also used when a pattern is unknown - /// due to validator-specific error recovery. Cannot be evaluated. + /// Used when a pattern is unknown due to validator-specific error + /// recovery. Matches all values, and evaluates to an unresolved value. + /// Syntax (only for printing): `!`. + Unresolved, + + /// Accepts any meta::Value. Cannot be evaluated. Syntax: `?`. Any, - /// A binding. These act sort of like variables in a given context. When - /// the binding is first matched against a value, it acts like Any and - /// assumes that value. When it is matched again in the same context later, - /// it only matches meta::Values that are exactly equal to the previous - /// match. When evaluated, it evaluates to the value that the binding was - /// bound to, or fails if it was not bound. Syntax: any identifier that - /// isn't recognized as anything else. - Binding(String), - - /// A special binding that accepts any boolean. When the binding is first - /// matched against a value, the binding assumes the value. When it is - /// matched again in the same context later, the binding assumes the - /// boolean OR of the previous value of the binding and the matched value. - /// This is used to handle MIRROR nullability behavior. When evaluated, it - /// evaluates to the value that the binding was bound to, or to false if - /// it was not found. Syntax: any identifier that isn't recognized as - /// anything else, followed by a `?`. - ImplicitOrBinding(String), + /// A binding. These act sort of like variables in a given context. Four + /// variations on the matching and evaluation rules exist; see [Binding]. + Binding(Binding), /// Pattern for booleans. 
/// - None: both true and false match the pattern. Cannot be evaluated. @@ -131,12 +125,9 @@ impl Describe for Value { limit: util::string::Limit, ) -> std::fmt::Result { match self { + Value::Unresolved => write!(f, "!"), Value::Any => write!(f, "?"), - Value::Binding(name) => util::string::describe_identifier(f, name, limit), - Value::ImplicitOrBinding(name) => { - util::string::describe_identifier(f, name, limit)?; - write!(f, "?") - } + Value::Binding(binding) => binding.describe(f, limit), Value::Boolean(None) => write!(f, "metabool"), Value::Boolean(Some(true)) => write!(f, "true"), Value::Boolean(Some(false)) => write!(f, "false"), @@ -155,10 +146,8 @@ impl Describe for Value { Value::DataType(None) => write!(f, "typename"), Value::DataType(Some(pattern)) => pattern.describe(f, limit), Value::Function(func, args) => { - let (a, b) = limit.split(20); - func.describe(f, a)?; - write!(f, "(")?; - util::string::describe_sequence(f, args, b, 10, |f, arg, _, limit| { + write!(f, "{func}(")?; + util::string::describe_sequence(f, args, limit, 10, |f, arg, _, limit| { arg.describe(f, limit) }) } @@ -172,6 +161,12 @@ impl std::fmt::Display for Value { } } +impl Default for Value { + fn default() -> Self { + Value::Unresolved + } +} + impl Value { /// Match the given value without being lenient about unresolved values. /// Whenever this returns false, the public match_pattern_with_context() @@ -179,37 +174,15 @@ impl Value { /// result with true if so; unresolved values should always match /// everything in order to avoid flooding the user with error messages /// when the validator is confused due to a previous error. - fn match_strictly(&self, context: &mut meta::Context, value: &meta::Value) -> bool { - match self { + fn match_strictly( + &self, + context: &mut meta::Context, + value: &meta::Value, + ) -> diagnostic::Result { + Ok(match self { + Value::Unresolved => true, Value::Any => true, - Value::Binding(name) => { - if let Some(expected) = context.bindings.get(name) { - if value != expected { - return false; - } - } else { - context - .bindings - .insert(name.clone(), value.clone()) - .unwrap(); - } - true - } - Value::ImplicitOrBinding(name) => { - if let Some(mut value) = value.get_boolean() { - if let Some(expected) = context.bindings.get(name) { - if let Some(current) = expected.get_boolean() { - value |= current; - } else { - return false; - } - } - context.bindings.insert(name.clone(), value.into()).unwrap(); - true - } else { - false - } - } + Value::Binding(binding) => binding.match_strictly(context, value)?, Value::Boolean(expected) => { if let Some(value) = value.get_boolean() { if let Some(expected) = expected { @@ -255,7 +228,7 @@ impl Value { Value::DataType(expected) => { if let Some(value) = value.get_data_type() { if let Some(expected) = expected { - expected.match_pattern_with_context(context, &value) + expected.match_pattern_with_context(context, &value)? } else { true } @@ -263,7 +236,20 @@ impl Value { false } } - Value::Function(_, _) => false, + Value::Function(func, args) => Value::exactly(func.evaluate(context, args)?) + .match_pattern_with_context(context, value)?, + }) + } + + /// Returns a pattern that matches the given type exactly. 
+ pub fn exactly_type(meta_type: meta::Type) -> Self { + match meta_type { + meta::Type::Unresolved => Value::Any, + meta::Type::Boolean => Value::Boolean(None), + meta::Type::Integer => Value::Integer(i64::MIN, i64::MAX), + meta::Type::Enum => Value::Enum(None), + meta::Type::String => Value::String(None), + meta::Type::DataType => Value::DataType(None), } } } @@ -273,7 +259,7 @@ impl Pattern for Value { fn exactly(value: Self::Value) -> Self { match value { - meta::Value::Unresolved => Value::Any, + meta::Value::Unresolved => Value::Unresolved, meta::Value::Boolean(x) => Value::Boolean(Some(x)), meta::Value::Integer(x) => Value::Integer(x, x), meta::Value::Enum(x) => Value::Enum(Some(vec![x])), @@ -282,8 +268,12 @@ impl Pattern for Value { } } - fn match_pattern_with_context(&self, context: &mut meta::Context, value: &Self::Value) -> bool { - self.match_strictly(context, value) || value.is_unresolved() + fn match_pattern_with_context( + &self, + context: &mut meta::Context, + value: &Self::Value, + ) -> diagnostic::Result { + Ok(self.match_strictly(context, value)? || value.is_unresolved()) } fn evaluate_with_context( @@ -291,34 +281,18 @@ impl Pattern for Value { context: &mut meta::Context, ) -> diagnostic::Result { match self { - Value::Any => Err(cause!(DerivationInvalid, "? patterns cannot be evaluated")), - Value::Binding(name) => { - if let Some(value) = context.bindings.get(name) { - Ok(value.clone()) - } else { - Err(cause!(DerivationInvalid, "{name} was never bound")) - } - } - Value::ImplicitOrBinding(name) => { - if let Some(value) = context.bindings.get(name) { - if value.get_boolean().is_none() { - Err(cause!( - DerivationInvalid, - "cannot evaluate {name}? because {name} was not bound to a boolean" - )) - } else { - Ok(value.clone()) - } - } else { - Ok(false.into()) - } - } + Value::Unresolved => Ok(meta::Value::Unresolved), + Value::Any => Err(cause!( + TypeDerivationInvalid, + "? patterns cannot be evaluated" + )), + Value::Binding(binding) => binding.evaluate_with_context(context), Value::Boolean(value) => { if let Some(value) = value { Ok((*value).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "cannot evaluate boolean with unknown value" )) } @@ -328,7 +302,7 @@ impl Pattern for Value { Ok((*low).into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "cannot evaluate integer with unknown value" )) } @@ -339,12 +313,15 @@ impl Pattern for Value { Ok(meta::Value::Enum(values[0].clone())) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "cannot evaluate enum with unknown value" )) } } else { - Err(cause!(DerivationInvalid, "cannot evaluate undefined enum")) + Err(cause!( + TypeDerivationInvalid, + "cannot evaluate undefined enum" + )) } } Value::String(value) => { @@ -352,7 +329,7 @@ impl Pattern for Value { Ok(value.clone().into()) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "cannot evaluate string with unknown value" )) } @@ -362,7 +339,7 @@ impl Pattern for Value { value.evaluate_with_context(context).map(meta::Value::from) } else { Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "cannot evaluate undefined data type" )) } @@ -372,16 +349,212 @@ impl Pattern for Value { } } -impl Value { - /// Returns a pattern that matches the given type exactly. 
- pub fn exactly_type(meta_type: meta::Type) -> Self { - match meta_type { - meta::Type::Unresolved => Value::Any, - meta::Type::Boolean => Value::Boolean(None), - meta::Type::Integer => Value::Integer(i64::MIN, i64::MAX), - meta::Type::Enum => Value::Enum(None), - meta::Type::String => Value::String(None), - meta::Type::DataType => Value::DataType(None), +/// Binding matching structure. Four variations exist, as detailed below. +/// +/// ## Normal bindings +/// +/// (inconsistent = false, nullability = None) +/// +/// When the binding is first matched against a value, it acts like Any and +/// assumes that value. When it is matched again in the same context later, +/// it only matches meta::Values that are exactly equal to the previous +/// match. When evaluated, it evaluates to the value that the binding was +/// bound to, or fails if it was not bound. Syntax: any identifier that +/// isn't recognized as anything else. +/// +/// ## Inconsistent bindings +/// +/// (inconsistent = true, nullability = None) +/// +/// A special binding that always accepts any metavalue. When the binding +/// is first matched against a value, the binding assumes the value. When +/// it is matched again in the same context later, the binding assumes the +/// boolean OR of the previous value of the binding and the matched value +/// if both values happen to be booleans; this is used to handle MIRROR +/// nullability behavior. If one or neither of the values is not a boolean, +/// the binding is not modified; this is used to handle inconsistent +/// variadic function arguments. When evaluated, it evaluates to the value +/// that the binding was bound to, or to false if it was not found; this, +/// again, is used for MIRROR nullability behavior (in the return +/// derivation this time). Syntax: a `?` followed by any identifier. +/// +/// ## Normal bindings with nullability override +/// +/// (inconsistent = false, nullability = Some(pattern)) +/// +/// A normal binding, but with overrides for nullability. Both the incoming +/// and (if any) previously bound value must be typenames. When matching +/// against a previously bound value, the nullability field of the type is +/// ignored; instead, the nullability of the incoming type is always +/// matched against the contained pattern. When evaluating, the nullability +/// of the previously bound value is overridden with the nullability as +/// evaluated by the contained pattern. Otherwise, the rules are the same +/// as for normal bindings. +/// +/// ## Inconsistent bindings with nullability override +/// +/// (inconsistent = true, nullability = Some(pattern)) +/// +/// An inconsistent binding, but with overrides for nullability. Both the +/// incoming and (if any) previously bound value must be typenames. When +/// matching, the nullability of the incoming type is matched against the +/// contained pattern. When evaluating, the nullability of the previously +/// bound value is overridden with the nullability as evaluated by the +/// contained pattern. Unlike for normal inconsistent bindings, the binding +/// must have been previously bound for evaluation to succeed. Otherwise, +/// the rules are the same as for normal inconsistent bindings. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Binding { + /// The name of the binding, using its original case convention. Note that + /// bindings are matched case-insensitively. + pub name: String, + + /// Whether the binding uses consistent or inconsistent matching rules. 
+ pub inconsistent: bool, + + /// Whether this is a normal binding or a binding with nullability + /// override, and if the latter, the nullability pattern. + pub nullability: Option>, +} + +impl Describe for Binding { + fn describe( + &self, + f: &mut std::fmt::Formatter<'_>, + limit: util::string::Limit, + ) -> std::fmt::Result { + if self.inconsistent { + write!(f, "?")?; + } + if let Some(nullability) = &self.nullability { + let (a, b) = limit.split(self.name.len()); + describe_identifier(f, &self.name, a)?; + match nullability.as_ref() { + Value::Boolean(Some(false)) => write!(f, "!")?, + Value::Boolean(Some(true)) => write!(f, "?")?, + Value::Boolean(None) => write!(f, "??")?, + x => { + write!(f, "?")?; + x.describe(f, b)?; + } + } + } else { + describe_identifier(f, &self.name, limit)?; + } + Ok(()) + } +} + +impl std::fmt::Display for Binding { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.display().fmt(f) + } +} + +impl Binding { + /// Match the given binding without being lenient about unresolved values. + /// Whenever this returns false, the public match_pattern_with_context() + /// function will check if the value was unresolved, and override the + /// result with true if so; unresolved values should always match + /// everything in order to avoid flooding the user with error messages + /// when the validator is confused due to a previous error. + pub fn match_strictly( + &self, + context: &mut meta::Context, + value: &meta::Value, + ) -> diagnostic::Result { + // If nullability is specified, the value must be a data type and its + // nullability must match the pattern. + if let Some(nullability) = &self.nullability { + if let meta::Value::DataType(dt) = value { + if !nullability.match_pattern(&dt.nullable().into())? { + return Ok(false); + } + } else { + return Ok(false); + } + } + + // Handle the rest of the matching process. + if let Some(current) = context.bindings.get(&self.name.to_ascii_lowercase()) { + if self.inconsistent { + // Handle the special rule for handling MIRROR nullability. + if current == &meta::Value::Boolean(false) && value == &meta::Value::Boolean(true) { + context + .bindings + .insert(self.name.to_ascii_lowercase(), meta::Value::Boolean(true)); + } + + // Match anything. + Ok(true) + } else if self.nullability.is_some() { + // Match all parts of the metavalue *except* nullability. + if let meta::Value::DataType(current) = current { + DataType::exactly(current.clone()).match_strictly( + context, + &value.get_data_type().unwrap_or_default(), + true, + ) + } else { + Ok(false) + } + } else { + // Match the complete metavalues exactly. + Value::exactly(current.clone()).match_strictly(context, value) + } + } else { + // Bind the incoming value to the binding. + context + .bindings + .insert(self.name.to_ascii_lowercase(), value.clone()); + + // Match anything. + Ok(true) + } + } + + pub fn evaluate_with_context( + &self, + context: &mut meta::Context, + ) -> diagnostic::Result { + if let Some(nullability) = &self.nullability { + // Yield the current value of the binding, augmented with the + // nullability field. 
+ if let Some(current) = context.bindings.get(&self.name.to_ascii_lowercase()) { + if let meta::Value::DataType(current) = current { + let nullability = nullability.evaluate()?; + if let meta::Value::Boolean(nullability) = nullability { + Ok(current.override_nullable(nullability).into()) + } else { + Err(cause!( + TypeDerivationInvalid, + "nullability pattern must yield a metabool, but yielded {nullability}" + )) + } + } else { + Err(cause!( + TypeDerivationInvalid, + "{} must be a data type, but was bound to {current}", + &self.name + )) + } + } else { + Err(cause!( + TypeDerivationInvalid, + "{} was never bound", + &self.name + )) + } + } else if let Some(current) = context.bindings.get(&self.name.to_ascii_lowercase()) { + Ok(current.clone()) + } else if self.inconsistent { + Ok(false.into()) + } else { + Err(cause!( + TypeDerivationInvalid, + "{} was never bound", + &self.name + )) } } } @@ -396,10 +569,11 @@ pub struct DataType { /// Nullability, defined using a (boolean) Pattern. Syntax: /// - no suffix: Boolean(Some(false)) + /// - `!` suffix: Boolean(Some(false)) /// - `?` suffix: Boolean(Some(true)) /// - `??` suffix: Boolean(None) /// - `?` suffix: Binding(_) - /// - `??` suffix: ImplicitOrBinding(_) + /// - `??` suffix: InconsistentBinding(_) pub nullable: Arc, /// Type variation pattern. @@ -422,11 +596,14 @@ impl Describe for DataType { limit: util::string::Limit, ) -> std::fmt::Result { let mut non_recursive = String::new(); - write!( - &mut non_recursive, - "{}{}{}", - self.class, self.nullable, self.variation - )?; + write!(&mut non_recursive, "{}", self.class)?; + match self.nullable.as_ref() { + Value::Boolean(Some(false)) => (), + Value::Boolean(Some(true)) => write!(&mut non_recursive, "?")?, + Value::Boolean(None) => write!(&mut non_recursive, "??")?, + x => write!(&mut non_recursive, "?{x}")?, + } + write!(&mut non_recursive, "{}", self.variation)?; write!(f, "{}", non_recursive)?; if let Some(parameters) = &self.parameters { write!(f, "<")?; @@ -452,31 +629,37 @@ impl DataType { /// result with true if so; unresolved values should always match /// everything in order to avoid flooding the user with error messages /// when the validator is confused due to a previous error. - fn match_strictly(&self, context: &mut meta::Context, value: &data::Type) -> bool { + fn match_strictly( + &self, + context: &mut meta::Context, + value: &data::Type, + ignore_nullability: bool, + ) -> diagnostic::Result { if !value.class().weak_equals(&self.class) { - return false; + return Ok(false); } - if self - .nullable - .match_pattern_with_context(context, &value.nullable().into()) + if !ignore_nullability + && self + .nullable + .match_pattern_with_context(context, &value.nullable().into())? { - return false; + return Ok(false); } - if self.variation.match_pattern(value.variation()) { - return false; + if self.variation.match_pattern(value.variation())? { + return Ok(false); } if let Some(expected) = &self.parameters { let parameters = value.parameters(); if parameters.len() != expected.len() { - return false; + return Ok(false); } for (parameter, expected) in parameters.iter().zip(expected.iter()) { - if !expected.match_pattern_with_context(context, parameter) { - return false; + if !expected.match_pattern_with_context(context, parameter)? 
{ + return Ok(false); } } } - true + Ok(true) } } @@ -499,8 +682,12 @@ impl Pattern for DataType { } } - fn match_pattern_with_context(&self, context: &mut meta::Context, value: &Self::Value) -> bool { - self.match_strictly(context, value) || value.is_unresolved() + fn match_pattern_with_context( + &self, + context: &mut meta::Context, + value: &Self::Value, + ) -> diagnostic::Result { + Ok(self.match_strictly(context, value, false)? || value.is_unresolved()) } fn evaluate_with_context( @@ -514,7 +701,7 @@ impl Pattern for DataType { .get_boolean() .ok_or_else(|| { cause!( - DerivationInvalid, + TypeDerivationInvalid, "nullability pattern evaluated to non-boolean" ) })?; @@ -539,7 +726,8 @@ pub enum Variation { /// Matches any variation that is compatible with the system-preferred /// variation; that is, matches the system-preferred variation and any - /// variation declared with INHERITS function behavior. + /// variation declared with INHERITS function behavior. Syntax: no + /// suffix. Compatible, /// Matches exactly the given variation, regardless of INHERITS function @@ -570,12 +758,12 @@ impl Pattern for Variation { &self, _context: &mut meta::Context, value: &Self::Value, - ) -> bool { - match self { + ) -> diagnostic::Result { + Ok(match self { Variation::Any => true, Variation::Compatible => value.is_compatible_with_system_preferred(), Variation::Exactly(expected) => value == expected, - } + }) } fn evaluate_with_context( @@ -584,7 +772,7 @@ impl Pattern for Variation { ) -> diagnostic::Result { match self { Variation::Any => Err(cause!( - DerivationInvalid, + TypeDerivationInvalid, "cannot evaluate undefined variation" )), Variation::Compatible => Ok(data::Variation::SystemPreferred), @@ -642,6 +830,15 @@ impl std::fmt::Display for Parameter { } } +impl Default for Parameter { + fn default() -> Self { + Self { + name: None, + value: Some(Value::Unresolved), + } + } +} + impl Pattern for Parameter { type Value = data::Parameter; @@ -654,8 +851,12 @@ impl Pattern for Parameter { /// Matches this pattern. Note the special case to let the ? pattern match /// nulls, and note that names are ignored. - fn match_pattern_with_context(&self, context: &mut meta::Context, param: &Self::Value) -> bool { - match &self.value { + fn match_pattern_with_context( + &self, + context: &mut meta::Context, + param: &Self::Value, + ) -> diagnostic::Result { + Ok(match &self.value { None => { // The null pattern only matches nulls. param.value.is_none() @@ -665,9 +866,9 @@ impl Pattern for Parameter { // Special case for nulls and ? to make ? match null. matches!(pattern, Value::Any) } - Some(value) => pattern.match_pattern_with_context(context, value), + Some(value) => pattern.match_pattern_with_context(context, value)?, }, - } + }) } fn evaluate_with_context( diff --git a/rs/src/output/type_system/meta/program.rs b/rs/src/output/type_system/meta/program.rs index 21e17d57..49ed42a1 100644 --- a/rs/src/output/type_system/meta/program.rs +++ b/rs/src/output/type_system/meta/program.rs @@ -18,7 +18,7 @@ use super::Pattern; /// the last maps to a statement, and the last non-empty line maps to the /// expression. # can be used as an end-of-line comment, and semicolons may /// be used in place of newlines (though these do not terminate a # comment). -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Program { /// Zero or more evaluate-and-match statements to execute before /// evaluating the final expression. 
@@ -76,7 +76,7 @@ impl Program { pub fn evaluate_type(&self, context: &mut meta::Context) -> diagnostic::Result { self.evaluate(context)?.get_data_type().ok_or_else(|| { cause!( - DerivationInvalid, + TypeDerivationInvalid, "type derivation program must yield a typename" ) }) @@ -93,7 +93,7 @@ impl Program { /// syntax for this is `lhs = rhs`, but `assert rhs matches lhs` and `assert rhs` /// are syntactic sugar for aforementioned patterns to make those easier to /// understand. -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Statement { /// The pattern appearing on the left-hand side of the evaluate-and-match /// statement. This is what the result of the expression will be matched @@ -132,11 +132,14 @@ impl Statement { .rhs_expression .evaluate_with_context(context) .map_err(|e| e.prefix("while evaluating RHS"))?; - if self.lhs_pattern.match_pattern_with_context(context, &value) { + if self + .lhs_pattern + .match_pattern_with_context(context, &value)? + { Ok(()) } else { Err(cause!( - DerivationFailed, + TypeDerivationFailed, "failed to match {} against pattern", value )) diff --git a/rs/src/parse/extensions/simple/builder.rs b/rs/src/parse/extensions/simple/builder.rs index 09348cde..bc596cc9 100644 --- a/rs/src/parse/extensions/simple/builder.rs +++ b/rs/src/parse/extensions/simple/builder.rs @@ -4,15 +4,63 @@ //! extension file. use crate::output::extension; +use std::collections::HashMap; +use std::sync::Arc; #[derive(Clone, Debug, Default)] pub struct Builder { - // TODO + /// Map with references to dependencies. + pub dependencies: HashMap, + + /// Namespace used for type classes defined in this extension and its + /// dependencies. + pub type_classes: extension::simple::type_class::NamespaceDefinition, + + /// Namespace used for type variations defined in this extension and its + /// dependencies. + pub type_variations: extension::simple::type_variation::NamespaceDefinition, + + /// Namespace used for functions defined in this extension and its + /// dependencies. Both simple and compound names are registered. + pub function_impls: extension::simple::function::NamespaceDefinition, } impl From for extension::simple::module::Definition { - fn from(_: Builder) -> Self { - // TODO - extension::simple::module::Definition::default() + fn from(builder: Builder) -> Self { + extension::simple::module::Definition { + dependencies: builder.dependencies, + type_classes: Arc::new(builder.type_classes), + type_variations: Arc::new(builder.type_variations), + function_impls: Arc::new(builder.function_impls), + } + } +} + +impl extension::simple::module::Scope for Builder { + /// Resolves a to-be-resolved reference to a type class. + fn resolve_type_class(&self, name: T) -> extension::simple::type_class::ResolutionResult + where + T: Into, + { + self.type_classes.resolve_local(name.into()) + } + + /// Resolves a to-be-resolved reference to a type variation. + fn resolve_type_variation( + &self, + name: T, + ) -> extension::simple::type_variation::ResolutionResult + where + T: Into, + { + self.type_variations.resolve_local(name.into()) + } + + /// Resolves a to-be-resolved reference to a function. 
+ fn resolve_function(&self, name: T) -> extension::simple::function::ResolutionResult + where + T: Into, + { + self.function_impls.resolve_local(name.into()) } } diff --git a/rs/src/parse/extensions/simple/derivations.ebnf b/rs/src/parse/extensions/simple/derivations.ebnf new file mode 100644 index 00000000..cb95933e --- /dev/null +++ b/rs/src/parse/extensions/simple/derivations.ebnf @@ -0,0 +1,407 @@ + +/* NOTE: this grammar should be matched case-insensitively. */ + +/** + * A type derivation program consists of zero or more statements followed by + * the final pattern that should evaluate to the derived data type. + */ +program ::= statement-separator* ( statement statement-separator+ )* pattern statement-separator*; + +/** + * Statements are separated from each other and from the final derivation + * expression using newlines or a semicolon. + */ +statement-separator :== [#r#n;]+ ; + +/** + * Statements manipulate the state of the type derivation interpreter before + * the final derivation expression is evaluated. They look like assignment + * statements at first glance, but act more like equality or set containment + * assertions: the right-hand side is evaluated like an expression as you + * might expect, but the left-hand side acts just like the patterns that are + * used to match function argument types. While this is perhaps not the most + * intuitive ruleset, it is extremely easy to implement (it only reuses + * features we already needed anyway), while also being a much more powerful + * primitive than a simple assignment statement, because it can also be used + * for bounds checking and other assertions. For example, if we have a + * function like `fn(VARCHAR(a), VARCHAR(b))` and the implementation of the + * function requires that a + b equals 10, we can simply write "10 = a + b". + * This works, because the pattern "10" will only match the value 10, and + * a pattern mismatch at any point during the matching and evaluation process + * indicates that the implementation is incompatible with the given argument + * types. If you find this syntax confusing, you may also write + * "assert a + b matches 10" or "assert a + b == 10"; the former does the + * exact same thing, while the latter reduces to "true = a + b == 10", which is + * functionally the same thing. + * + * Note that when you use these statements like assignment statements, you can + * only ever reassign a binding to the same value. For example, "a = 10; a = 20" + * will always fail, because a cannot both be 10 and 20 at the same time (more + * accurately, a is bound to 10, so the second statement behaves like + * "10 = 20", and 20 does not match 10). + */ +statement + ::= pattern "=" pattern -> normal + | "assert" pattern "matches" pattern -> match + | "assert" pattern -> assert + ; + +/** + * Patterns are at the core of the type derivation interpreter; they are used + * both for matching and as expressions. However, note that not all types of + * patterns work in both contexts. + */ +pattern ::= pattern-or; + +/** + * Lazily-evaluated boolean OR expression. Maps to builtin or() function if + * more than one pattern is parsed. + */ +pattern-or ::= pattern-and ( "||" pattern-and )* ; + +/** + * Lazily-evaluated boolean AND expression. Maps to builtin and() function if + * more than one pattern is parsed. + */ +pattern-and ::= pattern-eq-neq ( "&&" pattern-eq-neq )* ; + +/** + * Equality and not-equality expressions. These map to the builtin equal() + * and not_equal() functions in left-to-right order. 
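+ * For example (hypothetical), `a + 1 == b` is interpreted as
+ * `equal(add(a, 1), b)`.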
+ */ +pattern-eq-neq ::= pattern-ineq ( op-eq-neq pattern-ineq )* ; +op-eq-neq ::= "==" | "!=" ; + +/** + * Integer inequality expressions. These map to the builtin greater_than(), + * less_than(), greater_equal(), and less_equal() functions in left-to-right + * order. + */ +pattern-ineq ::= pattern-add-sub ( op-ineq pattern-add-sub )* ; +op-ineq ::= "<" | "<=" | ">" | ">=" ; + +/** + * Integer addition and subtraction. These map to the builtin add() and + * subtract() functions in left-to-right order. + */ +pattern-add-sub ::= pattern-mul-div ( op-add-sub pattern-mul-div )* ; +op-add-sub ::= "+" | "-" ; + +/** + * Integer multiplication and division. These map to the builtin multiply() and + * divide() functions in left-to-right order. + */ +pattern-mul-div ::= pattern-misc ( op-mul-div pattern-misc )* ; +op-mul-div ::= "*" | "/" ; + +/** + * Miscellaneous patterns that don't need special rules for precedence or + * avoiding left-recursion. + */ +pattern-misc + + /** + * Parentheses for overriding operator precedence. + */ + ::= "(" pattern ")" -> parentheses + + /** + * If-then-else pattern. Can only be evaluated. The first pattern must + * evaluate to a boolean. The second or third pattern is then evaluated + * based on that boolean and returned. The branch that is not selected is + * also not evaluated (i.e. evaluation is lazy). + */ + | "if" pattern "then" pattern "else" pattern -> if-then-else + + /** + * Unary not function. Can only be evaluated and can only be applied to + * booleans. + */ + | "!" pattern -> unary-not + + /** + * The "anything" pattern. This matches everything, and cannot be evaluated. + * It's primarily intended for matching (parts of) argument types, when you + * don't need or want a binding. For example, `equals(?, ?) -> boolean` would + * allow for any combination of argument types. This distinguishes it from + * `equals(any1, any1) -> boolean`, which only accepts equal types; instead + * it behaves like `equals(any1, any2) -> boolean`. `?` is especially useful + * when you want this type of behavior for a variadic function; for example, + * `serialize(?...) -> binary` will match any number and combination of + * argument types, while `serialize(any1...) -> binary` would only accept any + * number of any *one* data type. + */ + | "?" -> any + + /** + * Matches any boolean value. Cannot be evaluated. + */ + | "metabool" -> bool-any + + /** + * Matches and evaluates to the boolean value "true". + */ + | "true" -> bool-true + + /** + * Matches and evaluates to the boolean value "false". + */ + | "false" -> bool-false + + /** + * Matches any integer value. Cannot be evaluated. + */ + | "metaint" -> int-any + + /** + * Matches any integer value within the specified inclusive range. Can only + * be evaluated if the two bounds are equal, in which case it reduces to just + * a single integer. + */ + | integer ".." integer -> int-range + + /** + * Matches any integer value that equals at least the given number. Cannot be + * evaluated. + */ + | integer ".." -> int-at-least + + /** + * Matches any integer value that equals at most the given number. Cannot be + * evaluated. + */ + | ".." integer -> int-at-most + + /** + * Matches and evaluates to exactly the given integer. + */ + | integer -> int-exactly + + /** + * Matches any enumeration constant. + */ + | "metaenum" -> enum-any + + /** + * Matches an enumeration constant in the given set. If only a single + * constant is specified, the pattern evaluates to that constant, otherwise + * it cannot be evaluated. 
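+ * For example, `{SILENT, SATURATE, ERROR}` (hypothetical constants) matches
+ * any one of those three constants, while `{SATURATE}` both matches and
+ * evaluates to SATURATE.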
+ */ + | "{" identifier ("," identifier)* "}" -> enum-set + + /** + * Matches any string. + */ + | "metastr" -> str-any + + /** + * Matches and evaluates to exactly the given string. + */ + | string -> str-exactly + + /** + * Matches any data type. + */ + | "typename" -> dt-any + + /** + * Evaluates a function. Cannot be matched. The following functions are + * currently available: + * + * - "not(metabool) -> metabool": boolean NOT. + * - "and(metabool*) -> metabool": boolean AND. Evaluated lazily from left + * to right. + * - "or(metabool*) -> metabool": boolean OR. Evaluated lazily from left to + * right. + * - "negate(metaint) -> metaint": integer negation. 64-bit two's complement + * overflow must be detected, and implies that the function implementation + * that the program belongs to does not match the given argument types. + * - "add(metaint*) -> metaint": integer sum. Overflow handled as above. + * - "subtract(metaint, metaint) -> metaint": subtracts an integer from + * another. Overflow handled as above. + * - "multiply(metaint*) -> metaint": integer product. Overflow handled as + * above. + * - "divide(metaint, metaint) -> metaint": divides an integer over + * another. Overflow and division by zero handled as above. + * - "min(metaint+) -> metaint": return the minimum integer value. + * - "max(metaint+) -> metaint": return the maximum integer value. + * - "equal(T, T) -> metabool": return whether the two values are equal. + * - "not_equal(T, T) -> metabool": return whether the two values are not + * equal. + * - "greater_than(metaint, metaint) -> metabool": return whether the left + * integer is greater than the right. + * - "less_than(metaint, metaint) -> metabool": return whether the left + * integer is less than the right. + * - "greater_equal(metaint, metaint) -> metabool": return whether the left + * integer is greater than or equal to the right. + * - "less_equal(metaint, metaint) -> metabool": return whether the left + * integer is less than or equal to the right. + * - "covers(value, pattern) -> metabool": return whether the left value + * matches the pattern. The pattern may make use of bindings that were + * previously defined, but it does NOT capture new bindings regardless + * of whether the pattern match succeeded. + * - "if_then_else(metabool, T, T) -> T": if-then-else expression. Evaluated + * lazily. + * + * Note that many of the functions also have corresponding expressions. These + * expressions are simply syntactic sugar for calling the functions directly. + */ + | identifier "(" ( pattern ("," pattern)* )? ")" -> function + + /** + * This pattern matches one of three things, which are too context-sensitive + * to distinguish at this time: + * + * - a data type pattern; + * - a binding; or + * - an enum constant. + * + * The type depends on the identifier path, and must be disambiguated in a + * three-step process: + * + * - Gather all identifiers that match a builtin type class or an in-scope + * user-defined type class. + * - Gather all enumeration parameter constants that these types declare. + * - Now disambiguate as follows: if an identifier path matches a type + * class, it's a type pattern; if it matches an enumeration parameter + * constant, it's an enum constant pattern; otherwise, it's a binding. + * + * Two types of bindings exist, with different behavior: + * + * - Normal bindings. The subset of the data type pattern syntax used for + * these is just a single identifier with no suffix. 
When matched the + * first time, this matches anything and binds the identifier to the + * matched value. The next time it will only match the previously bound + * value, and once bound, it will evaluate to the bound value. + * - Implicit-OR bindings. The subset of the data type pattern syntax used + * for these is just a single identifier with exactly and only a "?" + * suffix. These will always match both true and false, and will evaluate + * to whether any true value was matched. This is useful to model + * nullability behavior. For example, `add(i8?n?, i8?n?) -> i8?n?` will + * match any combination of nullabilities for the arguments, and return + * a nullable type if and only if either argument is nullable. + * + * Enum constants only match a single identifier. If a dt-binding-constant + * AST node resolves to a binding or an enum constant, an error should be + * emitted if illegal syntax was used. + */ + | identifier-path nullability? variation? parameters? -> dt-binding-constant + + /** + * Unary negation function. Can only be evaluated and can only be applied to + * integers. Note that this is all the way at the back because signed integer + * literals should be preferred, since those can also be matched, and can + * deal with -2^63 without overflow. + */ + | "-" pattern -> unary-negate + ; + +/** + * Nullability suffix for a data type pattern. + * + * - If there is no such suffix, the pattern matches only non-nullable types, + * and also evaluates to a non-nullable type if applicable. + * - If this suffix is just "?", the pattern matches only nullable types, + * and also evaluates to a nullable type if applicable. + * - If this suffix is a "?" followed by a pattern, the pattern is matched + * against false for non-nullable and true for nullable types. Likewise for + * evaluation; if the pattern evaluates to false the type will be + * non-nullable, if it evaluates to true it will be nullable. + * + * The "?" is also used for implicit-OR bindings. + */ +nullability ::= "?" pattern? ; + +/** + * Type variation suffix. + * + * - If there is no such suffix, the pattern matches any variation that is + * marked as compatible with the system-preferred variation via the function + * behavior option of the variation, as well as the system-preferred + * variation itself. It will evaluate to the system-preferred variation. + * - If the suffix is [0], the pattern matches and evaluates to the + * system-preferred variation exactly. + * - If the suffix is [ident], the pattern matches and evaluates to the named + * variation exactly. The variation must be in scope. + */ +variation ::= "[" variation-body "]" ; +variation-body ::= "?" | zero | identifier-path ; + +/** + * Type parameter pack suffix. + * + * - If there is no such suffix, the pattern accepts any number of parameters + * for the type (assuming that the type class accepts this as well), and + * will attempt to evaluate to a type with no parameters. + * - If there is a "<>" suffix, the pattern accepts only types with zero + * parameters, and will attempt to evaluate to a type with no parameters. + * - If parameters are specified, the pattern accepts only types with exactly + * the specified number of parameters, and will attempt to evaluate to a + * type with exactly those parameters. + */ +parameters ::= "<" ( parameter ("," parameter)* )? ">" ; + +/** + * Type parameter pattern. The name prefix is only used when evaluated (it is + * never matched), and is currently only accepted by the NSTRUCT (pseudo)type. 
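+ * For example (hypothetical), `NSTRUCT<x: i32, y: i32?>` names both of its
+ * parameters, whereas the `L` in `VARCHAR<L>` is an unnamed parameter
+ * pattern.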
+ */ +parameter ::= ( ident-or-string ":" )? optional-pattern ; + +/** + * A pattern for matching potentially-optional parameter values. "null" may be + * used to match or evaluate to explicitly-skipped optional parameters; + * otherwise, the given pattern is used for the parameter value. The "?" (any) + * pattern is special-cased to also match explicitly-skipped parameter slots. + */ +optional-pattern ::= "null" | pattern ; + +/** + * An identifier or a string (so the syntax allows for both). + */ +ident-or-string ::= string | identifier ; + +/** + * An identifier path, separated by periods. + */ +identifier-path ::= ( identifier "." )* identifier ; + +/** + * An identifier. Note that $ signs are legal in identifiers, and note that all + * identifier matching is case-insensitive. + */ +identifier :== [a-zA-Z_$] [a-zA-Z0-9_$]* ; + +/** + * A string literal. + */ +string :== '"' [^"]+ '"' ; + +/** + * An integer literal. + */ +integer ::= sign? unsigned ; + +/** + * The (optional) sign of a signed integer. + */ +sign ::= "-" | "+" ; + +/** + * An unsigned integer. + */ +unsigned ::= zero | nonzero ; + +/** + * The number zero. + */ +zero :== "0" ; + +/** + * A natural+ number. + */ +nonzero :== [1-9] [0-9]* ; + +/** + * Ignore spaces, tabs, and # end-of-line comments. + */ +_ :== [ #t]+ | ## [^#n#r]+ [#r#n]+ ; diff --git a/rs/src/parse/extensions/simple/derivations/.gitignore b/rs/src/parse/extensions/simple/derivations/.gitignore new file mode 100644 index 00000000..6b44e145 --- /dev/null +++ b/rs/src/parse/extensions/simple/derivations/.gitignore @@ -0,0 +1 @@ +.antlr diff --git a/rs/src/parse/extensions/simple/derivations/SubstraitType.g4 b/rs/src/parse/extensions/simple/derivations/SubstraitType.g4 new file mode 100644 index 00000000..f25d5414 --- /dev/null +++ b/rs/src/parse/extensions/simple/derivations/SubstraitType.g4 @@ -0,0 +1,593 @@ +grammar SubstraitType; + +// Note: this grammar is intentionally written to avoid ANTLR-specific features +// that someone who hasn't used ANTLR before might not know about, including +// explicitly avoiding left recursion, such that it can easily be ported to +// other parser generators if necessary. In this way, it hopefully doubles as a +// human-readable specification for this DSL. +// +// This comes at the cost of not generating very nice parse trees. You can use +// this grammar with ANTLR directly if you want, but you might want to rewrite +// it if you intend to use the listener or generated AST directly. +// +// Some things that you will need to know if you've never seen ANTLR before: +// - ANTLR distinguishes between tokenizer rules and parser rules by +// capitalization of the rule name: if the first letter is uppercase, the +// rule is a token rule; if it is lowercase, it is a parser rule. Yuck. +// - When multiple token rules match: +// - choose the token that matches the most text; +// - if same length, use the one defined earlier. +// (ANTLR supports implicit tokens as well, but we don't use them) +// - Parse conflicts are solved using PEG rules. That is, for alternations, +// the first alternative that matches the input is used. For ?, *, and +, +// matching is greedy. +// - The ~ symbol is used to negate character sets, as opposed to the [^...] +// syntax from regular expressions. + + +//============================================================================= +// Whitespace and comment tokens +//============================================================================= + +// Whitespace and comment handling. 
You can use C-style line and block +// comments. +LineComment : '//' ~[\r\n]* -> channel(HIDDEN) ; +BlockComment : ( '/*' ( ~'*' | '*'+ ~[*/] ) '*'* '*/' ) -> channel(HIDDEN) ; +Whitespace : [ \t]+ -> channel(HIDDEN) ; + +// Type derivations are newline-sensitive, so they're not ignored. +Newline : [\r\n]+ ; + +// Newlines can be embedded by escaping the newline character itself with a +// backslash. +EscNewline : '\\' [\r\n]+ -> channel(HIDDEN) ; + + +//============================================================================= +// Keyword tokens +//============================================================================= + +// Substrait is case-insensitive, ANTLR is not. So, in order to define our +// keywords in a somewhat readable way, we have to define these shortcuts. +// If you've never seen ANTLR before, fragment rules are pretty much just +// glorified preprocessor/search-and-replace macros. +fragment A : [aA]; fragment B : [bB]; fragment C : [cC]; fragment D : [dD]; +fragment E : [eE]; fragment F : [fF]; fragment G : [gG]; fragment H : [hH]; +fragment I : [iI]; fragment J : [jJ]; fragment K : [kK]; fragment L : [lL]; +fragment M : [mM]; fragment N : [nN]; fragment O : [oO]; fragment P : [pP]; +fragment Q : [qQ]; fragment R : [rR]; fragment S : [sS]; fragment T : [tT]; +fragment U : [uU]; fragment V : [vV]; fragment W : [wW]; fragment X : [xX]; +fragment Y : [yY]; fragment Z : [zZ]; + +// Syntactic keywords. +Assert : A S S E R T ; +Matches : M A T C H E S ; +If : I F ; +Then : T H E N ; +Else : E L S E ; + +// Named literal values. +Null : N U L L ; +True : T R U E ; +False : F A L S E ; + +// Metatype identification keywords. +Metabool : M E T A B O O L ; +Metaint : M E T A I N T ; +Metaenum : M E T A E N U M ; +Metastr : M E T A S T R ; +Typename : T Y P E N A M E ; + +// Note that data type classes are not keywords. We support user-defined type +// classes anyway, so name resolution has to be done after parsing anyway. + + +//============================================================================= +// Symbol tokens +//============================================================================= + +// Symbols used. +Period : '.' ; // identifier paths +Comma : ',' ; // separator for pattern lists +Colon : ':' ; // separator for named parameters +Semicolon : ';' ; // separator for statements +Question : '?' ; // any, inconsistent bindings & nullable type suffix +Bang : '!' ; // boolean NOT & explicitly non-nullable type suffix +OpenParen : '(' ; // precedence override & function call args (open) +CloseParen : ')' ; // precedence override & function call args (close) +OpenCurly : '{' ; // enum set patterns (open) +CloseCurly : '}' ; // enum set patterns (close) +OpenSquare : '[' ; // data type variation suffix (open) +CloseSquare : ']' ; // data type variation suffix (close) +Assign : '=' ; // assignment statements +BooleanOr : '||' ; // boolean OR expression +BooleanAnd : '&&' ; // boolean AND expression +Equal : '==' ; // equality expression +NotEqual : '!=' ; // not-equals expression +LessThan : '<' ; // less-than expression & data type parameter pack +LessEqual : '<=' ; // less-equal expression +GreaterThan : '>' ; // greater-than expression & data type parameter pack +GreaterEqual : '>=' ; // greater-equal expression +Plus : '+' ; // additions and integer literal sign +Minus : '-' ; // subtractions, negation, and integer literal sign +Multiply : '*' ; // multiplication expression +Divide : '/' ; // division expression +Range : '..' 
; // integer set patterns + + +//============================================================================= +// Procedurally-matched tokens +//============================================================================= + +// Tokens for integer literals. +Nonzero : [1-9] [0-9]* ; +Zero : '0' ; + +// String literal token. +String : '"' ~["] '"' ; + +// Identifier token. Note that $ signs are legal in identifiers, and note that +// all identifier matching is case-insensitive. Note also that keywords take +// precedence. +Identifier : [a-zA-Z_$] [a-zA-Z0-9_$]* ; + + +//============================================================================= +// Grammar rules +//============================================================================= + +// Most things in the simple extension YAMLs that refer to a type are parsed +// using patterns; patterns can both matched and evaluated (not ALL patterns +// can do both, but there is considerable overlap between the two classes, +// so they were conceptually merged). When a type needs to be derived based on +// a number of given metavalues, such as the data types of arguments passed to +// a function, a derivation program is used. Syntactically, the only difference +// is that programs can include a set of statements before the final pattern. +// Newlines can optionally go before or after a type derivation pattern or +// program without affecting syntax. +startPattern : Whitespace* Newline* pattern Newline* EOF ; +startProgram : Whitespace* Newline* program Newline* EOF ; + +// A type derivation program consists of zero or more statements followed by +// the final pattern that should evaluate to the derived data type. +program : ( statement statementSeparator )* pattern ; + +// Statements are separated from each other and from the final derivation +// expression using newlines or a semicolon. +statementSeparator : Newline* ( Newline | Semicolon Newline* ) ; + +// Statements manipulate the state of the type derivation interpreter before +// the final derivation expression is evaluated. They look like assignment +// statements at first glance, but act more like equality or set containment +// assertions: the right-hand side is evaluated like an expression as you +// might expect, but the left-hand side acts just like the patterns that are +// used to match function argument types. While this is perhaps not the most +// intuitive ruleset, it is extremely easy to implement (it only reuses +// features we already needed anyway), while also being a much more powerful +// primitive than a simple assignment statement, because it can also be used +// for bounds checking and other assertions. For example, if we have a +// function like `fn(VARCHAR(a), VARCHAR(b))` and the implementation of the +// function requires that a + b equals 10, we can simply write "10 = a + b". +// This works, because the pattern "10" will only match the value 10, and +// a pattern mismatch at any point during the matching and evaluation process +// indicates that the implementation is incompatible with the given argument +// types. If you find this syntax confusing, you may also write +// "assert a + b matches 10" or "assert a + b == 10"; the former does the +// exact same thing, while the latter reduces to "true = a + b == 10", which is +// functionally the same thing. +// +// Note that when you use these statements like assignment statements, you can +// only ever reassign a binding to the same value. 
For example, "a = 10; a = 20" +// will always fail, because a cannot both be 10 and 20 at the same time (more +// accurately, a is bound to 10, so the second statement behaves like +// "10 = 20", and 20 does not match 10). +statement + : pattern Assign pattern #Normal + | Assert pattern Matches pattern #Match + | Assert pattern #Assert + ; + +// Patterns are at the core of the type derivation interpreter; they are used +// both for matching and as expressions. However, note that not all types of +// patterns work in both contexts. +pattern : patternOr ; + +// Lazily-evaluated boolean OR expression. Maps to builtin or() function if +// more than one pattern is parsed. +patternOr : patternAnd ( operatorOr patternAnd )* ; +operatorOr : BooleanOr #Or ; + +// Lazily-evaluated boolean AND expression. Maps to builtin and() function if +// more than one pattern is parsed. +patternAnd : patternEqNeq ( operatorAnd patternEqNeq )* ; +operatorAnd : BooleanAnd #And ; + +// Equality and not-equality expressions. These map to the builtin equal() +// and not_equal() functions in left-to-right order. +patternEqNeq : patternIneq ( operatorEqNeq patternIneq )* ; +operatorEqNeq : Equal #Eq | NotEqual #Neq ; + +// Integer inequality expressions. These map to the builtin greater_than(), +// less_than(), greater_equal(), and less_equal() functions in left-to-right +// order. +patternIneq : patternAddSub ( operatorIneq patternAddSub )* ; +operatorIneq : LessThan #Lt | LessEqual #Le | GreaterThan #Gt | GreaterEqual #Ge ; + +// Integer addition and subtraction. These map to the builtin add() and +// subtract() functions in left-to-right order. +patternAddSub : patternMulDiv ( operatorAddSub patternMulDiv )* ; +operatorAddSub : Plus #Add | Minus #Sub ; + +// Integer multiplication and division. These map to the builtin multiply() and +// divide() functions in left-to-right order. +patternMulDiv : patternMisc ( operatorMulDiv patternMisc )* ; +operatorMulDiv : Multiply #Mul | Divide #Div ; + +// Miscellaneous patterns that don't need special rules for precedence or +// avoiding left-recursion. +patternMisc + + // Parentheses for overriding operator precedence. + : OpenParen pattern CloseParen #parentheses + + // If-then-else pattern. Can only be evaluated. The first pattern must + // evaluate to a boolean. The second or third pattern is then evaluated + // based on that boolean and returned. The branch that is not selected is + // also not evaluated (i.e. evaluation is lazy). + | If pattern Then pattern Else pattern #ifThenElse + + // Unary not function. Can only be evaluated and can only be applied to + // booleans. + | Bang pattern #unaryNot + + // The "anything" pattern. This matches everything, and cannot be evaluated. + // It's primarily intended for matching (parts of) argument types, when you + // don't need or want a binding. For example, `equals(?, ?) -> boolean` would + // allow for any combination of argument types. This distinguishes it from + // `equals(any1, any1) -> boolean`, which only accepts equal types; instead + // it behaves like `equals(any1, any2) -> boolean`. `?` is especially useful + // when you want this type of behavior for a variadic function; for example, + // `serialize(?...) -> binary` will match any number and combination of + // argument types, while `serialize(any1...) -> binary` would only accept any + // number of any *one* data type. + | Question #any + + // Matches any boolean value. Cannot be evaluated. 
+ | Metabool #boolAny + + // Matches and evaluates to the boolean value "true". + | True #boolTrue + + // Matches and evaluates to the boolean value "false". + | False #boolFalse + + // Matches any integer value. Cannot be evaluated. + | Metaint #intAny + + // Matches any integer value within the specified inclusive range. Can only + // be evaluated if the two bounds are equal, in which case it reduces to just + // a single integer. + | integer Range integer #intRange + + // Matches any integer value that equals at least the given number. Cannot be + // evaluated. + | integer Range #intAtLeast + + // Matches any integer value that equals at most the given number. Cannot be + // evaluated. + | Range integer #intAtMost + + // Matches and evaluates to exactly the given integer. + | integer #intExactly + + // Matches any enumeration constant. + | Metaenum #enumAny + + // Matches an enumeration constant in the given set. If only a single + // constant is specified, the pattern evaluates to that constant, otherwise + // it cannot be evaluated. + | OpenCurly Identifier (Comma Identifier)* CloseCurly #enumSet + + // Matches any string. + | Metastr #strAny + + // Matches and evaluates to exactly the given string. + | String #strExactly + + // Matches any data type. + | Typename #dtAny + + // Evaluates a function. When a function is used in match context, the + // function (and its arguments) will be *evaluated* instead, and the incoming + // value is matched against the result. This means that it is legal to define + // a function like f(VARCHAR(x), VARCHAR(y), VARCHAR(x + y)) because the x + // and y bindings are captured before x + y is evaluated, but it is NOT legal + // to define it like f(VARCHAR(x + y), VARCHAR(x), VARCHAR(y)) because x and + // y are not yet bound when x + y is evaluated. + // f(VARCHAR(x), VARCHAR(x + y), VARCHAR(y)) is also NOT legal, again because + // some of the function bindings have not yet been captured, even though + // mathematically this could be rewritten from x + y <- input to + // y <= input - x (the evaluator is not smart enough for this, and this + // rewriting cannot be generalized over all functions). + // + // The following functions are currently available: + // + // - "not(metabool) -> metabool": boolean NOT. + // - "and(metabool*) -> metabool": boolean AND. Evaluated lazily from left + // to right. + // - "or(metabool*) -> metabool": boolean OR. Evaluated lazily from left to + // right. + // - "negate(metaint) -> metaint": integer negation. 64-bit two's complement + // overflow must be detected, and implies that the function implementation + // that the program belongs to does not match the given argument types. + // - "add(metaint*) -> metaint": integer sum. Overflow handled as above. + // - "subtract(metaint, metaint) -> metaint": subtracts an integer from + // another. Overflow handled as above. + // - "multiply(metaint*) -> metaint": integer product. Overflow handled as + // above. + // - "divide(metaint, metaint) -> metaint": divides an integer over + // another. Overflow and division by zero handled as above. + // - "min(metaint+) -> metaint": return the minimum integer value. + // - "max(metaint+) -> metaint": return the maximum integer value. + // - "equal(T, T) -> metabool": return whether the two values are equal. + // - "not_equal(T, T) -> metabool": return whether the two values are not + // equal. + // - "greater_than(metaint, metaint) -> metabool": return whether the left + // integer is greater than the right. 
+ // - "less_than(metaint, metaint) -> metabool": return whether the left + // integer is less than the right. + // - "greater_equal(metaint, metaint) -> metabool": return whether the left + // integer is greater than or equal to the right. + // - "less_equal(metaint, metaint) -> metabool": return whether the left + // integer is less than or equal to the right. + // - "covers(value, pattern) -> metabool": return whether the left value + // matches the pattern. The pattern may make use of bindings that were + // previously defined. New bindings are captured if and only if covers + // returns true. This allows for patterns like + // assert if covers(x, struct) then a < 10 \ + // else if covers(x, struct) then a + b < 10 \ + // else false; + // to be written and work as expected. + // - "if_then_else(metabool, T, T) -> T": if-then-else expression. Evaluated + // lazily. + // + // Note that many of the functions also have corresponding expressions. These + // expressions are simply syntactic sugar for calling the functions directly. + | Identifier OpenParen ( pattern (Comma pattern)* )? CloseParen #function + + // This pattern matches one of three things, which are too context-sensitive + // to distinguish at this time: + // + // - a data type pattern; + // - an enum constant; + // - a normal binding; or + // - a binding with nullability override. + // + // The type depends on the identifier path, and must be disambiguated as + // follows during name resolution: + // + // - Keep track of a case-insensitive mapping from name to binding, enum + // constant, or type class while analyzing the parse tree. It will be + // empty initially. + // - Whenever this pattern appears, resolve the name using this mapping: + // - If resolution fails, resolve the name as a type class instead (it + // could be the name of a builtin type class, a type class defined + // in the current extension, or a type class defined in a dependency + // if appropriately prefixed with the dependency namespace): + // - If this succeeds, add an entry to the name mapping, mapping the + // incoming identifier path to the type class. If the type class is + // user-defined, and the type class has enum parameter slots, also + // add entries to the name mapping for all the enum variants; if a + // name was already defined, do NOT update the mapping. Finally, + // disambiguate the pattern as a data type pattern. + // - If this fails and the identifier path consists of only a single + // element, map the incoming identifier path to a binding, and + // disambiguate the pattern as a normal binding or a binding + // with nullability override, depending on the presence of the + // nullability field. + // - If the above fails and the identifier path consists of multiple + // elements, analysis should fail. + // - If resolution yields a binding, disambiguate the pattern as a + // normal binding or a binding with nullability override, depending on + // the presence of the nullability field. + // - If resolution yields an enum constant, disambiguate the pattern as + // an enum constant. + // - If resolution yields a type class, disambiguate the pattern as a + // data type pattern. + // + // If the optional nullability, variation, or parameters fields are non-empty + // when they can't be according to the rules of the disambiguated pattern + // type, analysis should fail. + // + // Note that the `!` suffix disambiguates between a normal binding and a + // binding with a non-nullable nullability override. 
For a data type pattern, + // non-nullable is the default, so something like `i32` is exactly the same + // as `i32!`. + // + // The behavior for the resolved pattern types is: + // - Data type pattern: + // - Matches a metavalue if and only if: + // - the metavalue is a typename; + // - the type class matches the identified class; + // - the nullability of the type matches the rules detailed in the + // comments of the nullability rule; + // - the variation of the type matches the rules detailed in the + // comments of the variation rule; and + // - the parameter pack matches the rules detailed in the comments + // of the parameters rule. + // - Evaluates to a data type with the specified type class and the + // evaluation result of the nullability, variation, and parameters + // fields. If any of those things cannot be evaluated, the data type + // pattern cannot be evaluated. If any parameter pack constraint + // violations result from this, they are treated as pattern match + // failures (i.e., if this happens in a return type derivation of + // a function, the function is said to not match the given arguments). + // + // - Enum constant: + // - Matches a metavalue if and only if it is exactly the specified enum + // variant. + // - Evaluates to the specified enum variant. + // - The nullability, variation, and parameters fields are illegal and + // must be blank. + // + // - Normal binding: + // - If this is the first use of the binding, matches any value. The + // incoming metavalue is bound to the binding as a side effect. + // - If the binding was previously bound, matches only if the incoming + // metavalue is exactly equal to the previous binding. + // - Can only be evaluated if the binding was previously bound, in which + // case it yields the bound value exactly. + // - The variation and parameters fields are illegal and must be blank. + // + // - Binding with nullability override: + // - If this is the first use of the binding, matches if and only if: + // - the incoming metavalue is a typename; and + // - the nullability of the incoming type matches the nullability + // field. + // If the above rules match, the incoming type is bound to the + // binding as a side effect. + // - If the binding was previously bound, matches if and only if: + // - the incoming metavalue is a typename; + // - the nullability of the incoming type matches the nullability + // field; + // - the previously bound metavalue is a typename; and + // - the incoming type matches the previously bound type, ignoring + // nullability. + // - Can only be evaluated if the binding was previously bound. If the + // previously bound metavalue is not a typename, this is treated as a + // pattern match failure. The returned type is the previously bound + // type, with its nullability adjusted according to the nullability + // field evaluation rules. + // - The variation and parameters fields are illegal and must be blank. + | identifierPath nullability? variation? parameters? #datatypeBindingOrConstant + + // Pattern for inconsistent bindings. Inconsistent bindings are variations + // of normal bindings and bindings with nullability override with looser + // matching and extended evaluation rules. These rules are designed + // specifically for matching inconsistent variadic arguments and for + // modelling MIRROR nullability behavior. Specifically: + // + // - Use `?T` instead of `T` for a variadic argument slot to capture the + // value of the first argument and ignore the rest, thus rendering it + // inconsistent. 
+ // - Use `type??nullable` instead of `type` for argument slots and the + // return type to match both nullable and non-nullable data types for + // the argument, and yield a nullable return type only if any of the + // bound arguments are nullable. + // + // The exacty behavior for the pattern types is as follows. Rules that differ + // from the consistent binding rules are highlighted with (!). + // + // - Normal inconsistent binding: + // - If this is the first use of the binding, matches any value. The + // incoming metavalue is bound to the binding as a side effect. + // - (!) If the binding was previously bound, matches any value. If the + // incoming metavalue is boolean true, and the currently bound + // metavalue is boolean false, update the binding to boolean true. + // Otherwise, leave it unchanged. + // - (!) If this is the first use of the binding, evaluation yields + // the metabool `false` (for the nullability of the return type in + // a MIRROR function). + // - If the binding was previously bound, evaluation yields the bound + // value exactly. + // + // - Inconsistent binding with nullability override: + // - If this is the first use of the binding, matches if and only if: + // - the incoming metavalue is a typename; and + // - the nullability of the incoming type matches the nullability + // field. + // If the above rules match, the incoming type is bound to the + // binding as a side effect. + // - (!) If the binding was previously bound, matches if and only if: + // - the incoming metavalue is a typename; and + // - the nullability of the incoming type matches the nullability + // field. + // The binding is not modified. + // - Can only be evaluated if the binding was previously bound. If the + // previously bound metavalue is not a typename, this is treated as a + // pattern match failure. The returned type is the previously bound + // type, with its nullability adjusted according to the nullability + // field evaluation rules. + // - The variation and parameters fields are illegal and must be blank. + | Question Identifier nullability? #inconsistent + + // Unary negation function. Can only be evaluated and can only be applied to + // integers. Note that this is all the way at the back because signed integer + // literals should be preferred, since those can also be matched, and can + // deal with -2^63 without overflow. + | Minus pattern #unaryNegate + ; + +// Nullability suffix for a data type pattern. +// +// - If there is no such suffix, or the suffix is "!", the pattern matches +// only non-nullable types, and also evaluates to a non-nullable type if +// applicable. The "!" suffix is necessary to distinguish between normal +// bindings and bindings with nullability override, but is otherwise +// optional and normally not written. +// - If this suffix is just "?", the pattern matches only nullable types, +// and also evaluates to a nullable type if applicable. +// - If this suffix is a "?" followed by a pattern, the pattern is matched +// against false for non-nullable and true for nullable types. Likewise for +// evaluation; if the pattern evaluates to false the type will be +// non-nullable, if it evaluates to true it will be nullable. +// +// The "?" is also used for implicit-OR bindings. +nullability + : Bang #nonNullable + | Question #nullable + | Question pattern #nullableIf + ; + +// Type variation suffix. 
+// +// - If there is no such suffix, the pattern matches any variation that is +// marked as compatible with the system-preferred variation via the function +// behavior option of the variation, as well as the system-preferred +// variation itself. It will evaluate to the system-preferred variation. +// - If the suffix is [?], the pattern matches any variation, and cannot be +// evaluated. +// - If the suffix is [0], the pattern matches and evaluates to the +// system-preferred variation exactly. +// - If the suffix is [ident], the pattern matches and evaluates to the named +// variation exactly. The variation must be in scope. +variation : OpenSquare variationBody CloseSquare ; +variationBody + : Question #varAny + | Zero #varSystemPreferred + | identifierPath #varUserDefined + ; + +// Type parameter pack suffix. +// +// - If there is no such suffix, the pattern accepts any number of parameters +// for the type (assuming that the type class accepts this as well), and +// will attempt to evaluate to a type with no parameters. +// - If there is a "<>" suffix, the pattern accepts only types with zero +// parameters, and will attempt to evaluate to a type with no parameters. +// - If parameters are specified, the pattern accepts only types with exactly +// the specified number of parameters, and will attempt to evaluate to a +// type with exactly those parameters. +parameters : LessThan ( parameter (Comma parameter)* )? GreaterThan ; + +// Type parameter pattern. The name prefix is only used when evaluated (it is +// never matched), and is currently only accepted by the NSTRUCT (pseudo)type. +parameter : ( identifierOrString Colon )? parameterValue ; + +// A pattern for matching potentially-optional parameter values. "null" may be +// used to match or evaluate to explicitly-skipped optional parameters; +// otherwise, the given pattern is used for the parameter value. The "?" (any) +// pattern is special-cased to also match explicitly-skipped parameter slots. +parameterValue : Null #Null | pattern #Specified; + +// Integer literals. +integer : ( Plus | Minus )? ( Zero | Nonzero ) ; + +// When identifying user-defined types and variations, period-separated +// namespace paths are supported. +identifierPath : ( Identifier Period )* Identifier ; + +// The names of parameters (i.e. NSTRUCT field names) can be specified using +// both identifiers and strings. The latter is idiomatic only when the field +// name is not a valid Substrait identifier. +identifierOrString : String #Str | Identifier #Ident; diff --git a/rs/src/parse/extensions/simple/derivations/mod.rs b/rs/src/parse/extensions/simple/derivations/mod.rs new file mode 100644 index 00000000..46b4349d --- /dev/null +++ b/rs/src/parse/extensions/simple/derivations/mod.rs @@ -0,0 +1,1020 @@ +// SPDX-License-Identifier: Apache-2.0 + +//! Module for parsing type derivations using ANTLR. 
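+//!
+//! As a (hypothetical) illustration of the DSL parsed by this module, a
+//! derivation program for a decimal addition could look like
+//! `P = max(P1, P2) + 1; assert P <= 38; DECIMAL<P, S1>`: two statements
+//! followed by the final pattern that evaluates to the derived type.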
+ +// TODO +#![allow(dead_code)] + +mod substraittypelexer; +mod substraittypelistener; +mod substraittypeparser; + +use crate::output::diagnostic::Result; +use crate::output::extension; +use crate::output::extension::simple::module::DynScope; +use crate::output::type_system::data; +use crate::output::type_system::meta; +use crate::output::type_system::meta::Pattern; +use crate::parse::context; +use antlr_rust::Parser; +use itertools::Itertools; +use std::cell::RefCell; +use std::collections::HashMap; +use std::collections::HashSet; +use std::rc::Rc; +use std::sync::Arc; +use strum::IntoEnumIterator; +use substraittypeparser::*; + +/// Enum for objects that are defined locally in analysis scope. +#[derive(Clone, Debug)] +enum PatternObject { + /// A named binding. + NamedBinding(String), + + /// A variant name of an enumeration defined as a parameter of a + /// user-defined type class that is within scope (type has been + /// used). + EnumVariant(String), + + /// A type class. + TypeClass(data::Class), +} + +/// Context/state information used while analyzing type patterns and +/// derivations. The lifetime of the context should match the lifetime of +/// the evaluation context; for functions, for example, this means that the +/// same context must be used for all argument patterns, the intermediate +/// type derivation (if any), and the return type derivation. +pub struct AnalysisContext<'a> { + /// The scope that we ultimately use to resolve names while analyzing. + scope: Option<&'a dyn DynScope>, + + /// Names defined locally. This namespace can reference type classes, + /// type parameter enumeration names, and named bindings. The keys are + /// stored in lowercase for case insensitive matching. + pattern_names: HashMap, +} + +impl<'a> Clone for AnalysisContext<'a> { + fn clone(&self) -> Self { + Self { + scope: self.scope, + pattern_names: self.pattern_names.clone(), + } + } +} + +impl<'a> AnalysisContext<'a> { + /// Makes a new analysis context from the given resolver, representing the + /// scope in which the type patterns/derivations are analyzed. + pub fn new(scope: Option<&'a dyn DynScope>) -> Self { + // Declare built-in type classes. + let mut pattern_names = HashMap::new(); + for simple in data::class::Simple::iter() { + pattern_names.insert( + simple.to_string().to_ascii_lowercase(), + PatternObject::TypeClass(data::Class::Simple(simple)), + ); + } + for compound in data::class::Compound::iter() { + pattern_names.insert( + compound.to_string().to_ascii_lowercase(), + PatternObject::TypeClass(data::Class::Compound(compound)), + ); + } + + Self { + scope, + pattern_names, + } + } + + /// Resolve a local identifier path. This always succeeds, because if the + /// name wasn't already defined, it will implicitly become a named binding. + /// Resolving a path to a type class may also have side effects; if the + /// type class is user-defined and has enumeration type parameters, those + /// names will be implicitly defined as being enum variants. + fn resolve_pattern(&mut self, x: I, y: &mut context::Context) -> PatternObject + where + S: AsRef, + I: Iterator, + { + let path = x.collect::>(); + let name = path.iter().map(|x| x.as_ref()).join("."); + let key = name.to_ascii_lowercase(); + + // If this object was used before, return it as it was originally + // implicitly declared. + if let Some(object) = self.pattern_names.get(&key) { + return object.clone(); + } + + // Try resolving as a user-definded type class. 
+ let object = if let Some(type_class) = self.scope.as_ref().and_then(|x| { + x.resolve_type_class_from_ref(name.clone().into()) + .expect_not_ambiguous(y, |_, _| false) + .as_opt_item() + }) { + // If the type class is known to have enum parameters, declare + // references to the variant names so we can use them. + if let Some(def) = &type_class.definition { + for slot in def.parameter_slots.iter() { + if let meta::pattern::Value::Enum(Some(variants)) = &slot.pattern { + for variant in variants { + self.pattern_names.insert( + variant.to_ascii_lowercase(), + PatternObject::EnumVariant(variant.clone()), + ); + } + } + } + } + + PatternObject::TypeClass(data::Class::UserDefined(type_class)) + } else { + PatternObject::NamedBinding(name) + }; + + // Declare the new reference and return the referred object. + self.pattern_names.insert(key, object.clone()); + object + } + + /// Resolve a type variation identifier path. + pub fn resolve_type_variation( + &mut self, + x: I, + y: &mut context::Context, + class: &data::Class, + ) -> extension::simple::type_variation::Reference + where + S: AsRef, + I: Iterator, + { + let path = x.collect::>(); + let name = path.iter().map(|x| x.as_ref()).join("."); + + self.scope + .as_ref() + .map(|x| { + x.resolve_type_variation_from_ref(name.clone().into()) + .filter_items(|x| &x.base == class) + .expect_one( + y, + |x, y| { + diagnostic!( + y, + Error, + LinkMissingTypeVariationNameAndClass, + "{x} exists, but is not a variation of {class} data types" + ); + true + }, + |_, _| false, + ) + .as_item() + }) + .unwrap_or_else(|| Arc::new(name.into())) + } +} + +/// Error listener that just collects error messages into a vector, such that +/// they can be obtained when parsing completes. +#[derive(Default, Clone)] +struct ErrorListener { + messages: Rc>>, +} + +impl<'a, T: antlr_rust::recognizer::Recognizer<'a>> antlr_rust::error_listener::ErrorListener<'a, T> + for ErrorListener +{ + fn syntax_error( + &self, + _recognizer: &T, + _offending_symbol: Option<&<::TF as antlr_rust::token_factory::TokenFactory<'a>>::Inner>, + line: isize, + column: isize, + msg: &str, + _error: Option<&antlr_rust::errors::ANTLRError>, + ) { + self.messages + .borrow_mut() + .push(format!("at {line}:{column}: {msg}")); + } +} + +impl ErrorListener { + pub fn new() -> Self { + Self::default() + } + + pub fn to_context(&self, y: &mut context::Context) { + for message in self.messages.borrow_mut().drain(..) { + diagnostic!(y, Error, TypeParseError, "{message}"); + } + } +} + +// Boilerplate code for connecting ANTLR to our diagnostic system and parsing +// a simple string slice with it. +macro_rules! antlr_parse { + ($x:expr, $y:expr, $start:ident) => {{ + let lexer = substraittypelexer::SubstraitTypeLexer::new(antlr_rust::InputStream::new($x)); + let token_source = antlr_rust::common_token_stream::CommonTokenStream::new(lexer); + let mut parser = SubstraitTypeParser::new(token_source); + let listener = ErrorListener::new(); + parser.remove_error_listeners(); + parser.add_error_listener(Box::new(listener.clone())); + let result = parser.$start(); + listener.to_context($y); + result.map_err(|e| cause!(TypeParseError, "{e}")) + }}; +} + +// Boilerplate code for converting the awkward left-recursion-avoidance rules +// for expressions into a normal expression tree. +macro_rules! 
+macro_rules! antlr_reduce_left_recursion {
+    (
+        $x:expr, $y:expr, $z:expr, $x_typ:ty,
+        $all_operands:ident, $next_analyzer:expr,
+        $one_operator:ident, $operator_match:tt
+    ) => {{
+        fn left_recursive(
+            x: &$x_typ,
+            y: &mut context::Context,
+            z: &mut AnalysisContext,
+            start: usize,
+        ) -> Result<meta::pattern::Value> {
+            if start == 0 {
+                // Only one operand remaining.
+                Ok(antlr_hidden_child!(x, y, 0, $next_analyzer, z).unwrap_or_default())
+            } else {
+                // We're traversing the tree bottom-up, so start with the last
+                // operation. The operations are evaluated left-to-right, so that's
+                // the rightmost operation.
+                let lhs = antlr_recurse!(x, y, lhs, left_recursive, z, start - 1)
+                    .1
+                    .unwrap_or_default();
+                let rhs = antlr_child!(x, y, rhs, start, $next_analyzer, z)
+                    .1
+                    .unwrap_or_default();
+                let function = x
+                    .$one_operator(start - 1)
+                    .map(|x| match x.as_ref() $operator_match)
+                    .unwrap_or_default();
+                Ok(meta::pattern::Value::Function(function, vec![lhs, rhs]))
+            }
+        }
+
+        let total_operands = $x.$all_operands().len();
+        left_recursive($x, $y, $z, total_operands - 1)
+    }};
+}
+
+/// Analyzes a string literal.
+fn analyze_string<S: AsRef<str>>(
+    x: S,
+    _y: &mut context::Context,
+    _z: &mut AnalysisContext,
+) -> Option<String> {
+    let x = x.as_ref();
+    if !x.starts_with('"') || !x.ends_with('"') || x.len() < 2 {
+        None
+    } else {
+        Some(x[1..x.len() - 1].to_string())
+    }
+}
+
+/// Analyzes an integer literal.
+fn analyze_integer(
+    x: &IntegerContextAll,
+    y: &mut context::Context,
+    _z: &mut AnalysisContext,
+) -> Result<i64> {
+    let value = if let Some(value) = x.Nonzero() {
+        value.symbol.text.parse().unwrap_or(i128::MAX)
+    } else {
+        0i128
+    };
+    Ok(if x.Minus().is_some() {
+        match i64::try_from(-value) {
+            Ok(val) => val,
+            Err(_) => {
+                diagnostic!(
+                    y,
+                    Error,
+                    TypeDerivationInvalid,
+                    "integer literal is too small, minimum is {}",
+                    i64::MIN
+                );
+                i64::MIN
+            }
+        }
+    } else {
+        match i64::try_from(value) {
+            Ok(val) => val,
+            Err(_) => {
+                diagnostic!(
+                    y,
+                    Error,
+                    TypeDerivationInvalid,
+                    "integer literal is too large, maximum is {}",
+                    i64::MAX
+                );
+                i64::MAX
+            }
+        }
+    })
+}
+
+/// Analyzes and resolves an identifier path.
+fn analyze_object_identifier(
+    x: &IdentifierPathContextAll,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<PatternObject> {
+    Ok(z.resolve_pattern(
+        x.Identifier_all().iter().map(|x| x.symbol.text.to_string()),
+        y,
+    ))
+}
+
+/// Analyzes a pattern that can end up being either a data type pattern, a
+/// binding, or an enum constant, depending on name resolution.
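+///
+/// For example (illustrative surface syntax), in a pattern along the lines
+/// of `decimal<P, S>` the identifier `decimal` resolves to a type class and
+/// is analyzed as a data type pattern with parameters, a previously unused
+/// name such as `P` implicitly becomes a named binding, and an identifier
+/// matching a variant of an in-scope enumeration parameter becomes an enum
+/// constant.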
+fn analyze_dtbc( + x: &DatatypeBindingOrConstantContext, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + let object = antlr_hidden_child!(x, y, 0, analyze_object_identifier, z) + .ok_or_else(|| cause!(TypeDerivationInvalid, "failed to resolve identifier"))?; + match object { + PatternObject::NamedBinding(name) => { + if x.variation().is_some() { + diagnostic!( + y, + Error, + TypeParseError, + "variation cannot be specified for bindings" + ); + } + if x.parameters().is_some() { + diagnostic!( + y, + Error, + TypeParseError, + "parameters cannot be specified for bindings" + ); + } + let nullability = x.nullability().map(|nullability| { + Arc::new(match nullability.as_ref() { + NullabilityContextAll::NullableContext(_) => { + meta::pattern::Value::Boolean(Some(true)) + } + NullabilityContextAll::NonNullableContext(_) => { + meta::pattern::Value::Boolean(Some(false)) + } + NullabilityContextAll::NullableIfContext(x) => { + antlr_child!(x, y, nullability, 0, analyze_pattern, z) + .1 + .unwrap_or_default() + } + NullabilityContextAll::Error(_) => meta::pattern::Value::Unresolved, + }) + }); + Ok(meta::pattern::Value::Binding(meta::pattern::Binding { + name, + inconsistent: false, + nullability, + })) + } + PatternObject::EnumVariant(name) => { + if x.nullability().is_some() { + diagnostic!( + y, + Error, + TypeParseError, + "nullability cannot be specified for enum literals" + ); + } + if x.variation().is_some() { + diagnostic!( + y, + Error, + TypeParseError, + "variation cannot be specified for enum literals" + ); + } + if x.parameters().is_some() { + diagnostic!( + y, + Error, + TypeParseError, + "parameters cannot be specified for enum literals" + ); + } + Ok(meta::pattern::Value::Enum(Some(vec![name]))) + } + PatternObject::TypeClass(class) => { + let nullable = if let Some(nullability) = x.nullability() { + match nullability.as_ref() { + NullabilityContextAll::NullableContext(_) => { + meta::pattern::Value::Boolean(Some(true)) + } + NullabilityContextAll::NonNullableContext(_) => { + meta::pattern::Value::Boolean(Some(false)) + } + NullabilityContextAll::NullableIfContext(x) => { + antlr_child!(x, y, nullability, 0, analyze_pattern, z) + .1 + .unwrap_or_default() + } + NullabilityContextAll::Error(_) => meta::pattern::Value::Unresolved, + } + } else { + meta::pattern::Value::Boolean(Some(false)) + }; + let variation = antlr_child!(x, y, variation, 0, analyze_type_variation, z, &class) + .1 + .unwrap_or(meta::pattern::Variation::Compatible); + let parameters = antlr_child!(x, y, parameters, 0, analyze_type_parameters, z).1; + Ok(meta::pattern::Value::DataType(Some( + meta::pattern::DataType { + class, + nullable: std::sync::Arc::new(nullable), + variation, + parameters, + }, + ))) + } + } +} + +/// Analyzes a type variation suffix. +fn analyze_type_variation_identifier( + x: &IdentifierPathContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, + class: &data::Class, +) -> Result { + Ok(z.resolve_type_variation( + x.Identifier_all().iter().map(|x| x.symbol.text.to_string()), + y, + class, + )) +} + +/// Analyzes a type variation suffix. 
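+///
+/// The variation body can select any variation, the system-preferred
+/// variation, or a user-defined variation referenced by name; the latter is
+/// resolved against the analysis scope and checked against the type class it
+/// is applied to.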
+fn analyze_type_variation(
+    x: &VariationContextAll,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+    class: &data::Class,
+) -> Result<meta::pattern::Variation> {
+    Ok(if let Some(x) = x.variationBody() {
+        match x.as_ref() {
+            VariationBodyContextAll::VarAnyContext(_) => meta::pattern::Variation::Any,
+            VariationBodyContextAll::VarSystemPreferredContext(_) => {
+                meta::pattern::Variation::Exactly(data::Variation::SystemPreferred)
+            }
+            VariationBodyContextAll::VarUserDefinedContext(x) => {
+                meta::pattern::Variation::Exactly(data::Variation::UserDefined(
+                    antlr_child!(
+                        x,
+                        y,
+                        variation,
+                        0,
+                        analyze_type_variation_identifier,
+                        z,
+                        class
+                    )
+                    .1
+                    .unwrap_or_default(),
+                ))
+            }
+            VariationBodyContextAll::Error(_) => meta::pattern::Variation::Any,
+        }
+    } else {
+        meta::pattern::Variation::Any
+    })
+}
+
+/// Analyzes a single type parameter.
+fn analyze_type_parameter(
+    x: &ParameterContextAll,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<meta::pattern::Parameter> {
+    let name = x.identifierOrString().and_then(|name| {
+        let name = match name.as_ref() {
+            IdentifierOrStringContextAll::StrContext(x) => x
+                .String()
+                .and_then(|x| analyze_string(&x.symbol.text, y, z))
+                .unwrap_or_default(),
+            IdentifierOrStringContextAll::IdentContext(x) => x
+                .Identifier()
+                .map(|x| x.symbol.text.to_string())
+                .unwrap_or_default(),
+            IdentifierOrStringContextAll::Error(_) => String::from(""),
+        };
+        if name.is_empty() {
+            diagnostic!(
+                y,
+                Error,
+                TypeInvalidFieldName,
+                "parameter names (if specified) cannot be empty"
+            );
+            None
+        } else {
+            Some(name)
+        }
+    });
+
+    let value = if let Some(value) = x.parameterValue() {
+        match value.as_ref() {
+            ParameterValueContextAll::SpecifiedContext(x) => Some(
+                antlr_child!(x, y, pattern, 0, analyze_pattern, z)
+                    .1
+                    .unwrap_or_default(),
+            ),
+            ParameterValueContextAll::NullContext(_) => None,
+            ParameterValueContextAll::Error(_) => Some(meta::pattern::Value::Unresolved),
+        }
+    } else {
+        Some(meta::pattern::Value::Unresolved)
+    };
+
+    Ok(meta::pattern::Parameter { name, value })
+}
+
+/// Analyzes a type parameter pack.
+fn analyze_type_parameters(
+    x: &ParametersContextAll,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<Vec<meta::pattern::Parameter>> {
+    Ok(antlr_children!(x, y, argument, analyze_type_parameter, z)
+        .1
+        .into_iter()
+        .map(|x| x.unwrap_or_default())
+        .collect())
+}
+
+/// Analyzes miscellaneous pattern types.
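+///
+/// This covers the leaf and prefix alternatives of the pattern grammar:
+/// parenthesized subpatterns, if-then-else, unary not and negate, wildcards,
+/// boolean/integer/string/enum literals and ranges, function calls, and the
+/// identifier form that resolves to a data type, binding, or enum constant.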
+fn analyze_pattern_misc( + x: &PatternMiscContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + match x { + PatternMiscContextAll::ParenthesesContext(x) => { + Ok(antlr_hidden_child!(x, y, 0, analyze_pattern, z).unwrap_or_default()) + } + PatternMiscContextAll::IfThenElseContext(x) => { + let condition = antlr_child!(x, y, condition, 0, analyze_pattern, z) + .1 + .unwrap_or_default(); + let if_true = antlr_child!(x, y, if_true, 1, analyze_pattern, z) + .1 + .unwrap_or_default(); + let if_false = antlr_child!(x, y, if_false, 2, analyze_pattern, z) + .1 + .unwrap_or_default(); + Ok(meta::pattern::Value::Function( + meta::Function::IfThenElse, + vec![condition, if_true, if_false], + )) + } + PatternMiscContextAll::UnaryNotContext(x) => { + let expression = antlr_child!(x, y, expression, 0, analyze_pattern, z) + .1 + .unwrap_or_default(); + Ok(meta::pattern::Value::Function( + meta::Function::Not, + vec![expression], + )) + } + PatternMiscContextAll::UnaryNegateContext(x) => { + let expression = antlr_child!(x, y, expression, 0, analyze_pattern, z) + .1 + .unwrap_or_default(); + Ok(meta::pattern::Value::Function( + meta::Function::Negate, + vec![expression], + )) + } + PatternMiscContextAll::AnyContext(_) => Ok(meta::pattern::Value::Any), + PatternMiscContextAll::BoolAnyContext(_) => Ok(meta::pattern::Value::Boolean(None)), + PatternMiscContextAll::BoolTrueContext(_) => Ok(meta::pattern::Value::Boolean(Some(true))), + PatternMiscContextAll::BoolFalseContext(_) => { + Ok(meta::pattern::Value::Boolean(Some(false))) + } + PatternMiscContextAll::IntAnyContext(_) => { + Ok(meta::pattern::Value::Integer(i64::MIN, i64::MAX)) + } + PatternMiscContextAll::IntRangeContext(x) => { + let lower = antlr_hidden_child!(x, y, 0, analyze_integer, z).unwrap_or(i64::MIN); + let upper = antlr_hidden_child!(x, y, 1, analyze_integer, z).unwrap_or(i64::MAX); + if lower > upper { + diagnostic!( + y, + Error, + TypeDerivationInvalid, + "lower bound of range is greater than upper bound" + ); + } + Ok(meta::pattern::Value::Integer(lower, upper)) + } + PatternMiscContextAll::IntAtMostContext(x) => { + let upper = antlr_hidden_child!(x, y, 0, analyze_integer, z).unwrap_or(i64::MAX); + Ok(meta::pattern::Value::Integer(i64::MIN, upper)) + } + PatternMiscContextAll::IntAtLeastContext(x) => { + let lower = antlr_hidden_child!(x, y, 0, analyze_integer, z).unwrap_or(i64::MAX); + Ok(meta::pattern::Value::Integer(lower, i64::MAX)) + } + PatternMiscContextAll::IntExactlyContext(x) => { + let value = antlr_hidden_child!(x, y, 0, analyze_integer, z).unwrap_or(i64::MAX); + Ok(meta::pattern::Value::Integer(value, value)) + } + PatternMiscContextAll::EnumAnyContext(_) => Ok(meta::pattern::Value::Enum(None)), + PatternMiscContextAll::EnumSetContext(x) => { + let names = x + .Identifier_all() + .iter() + .map(|x| x.symbol.text.to_string()) + .collect::>(); + let mut unique_names = HashSet::new(); + let mut repeated_names = HashSet::new(); + for name in names.iter() { + if !unique_names.insert(name.to_ascii_uppercase()) { + repeated_names.insert(name.to_ascii_uppercase()); + } + } + if !repeated_names.is_empty() { + diagnostic!( + y, + Error, + RedundantEnumVariant, + "enumeration variant names should be case-insensitively unique: {}", + repeated_names.iter().join(", ") + ); + } + Ok(meta::pattern::Value::Enum(Some(names))) + } + PatternMiscContextAll::StrAnyContext(_) => Ok(meta::pattern::Value::String(None)), + PatternMiscContextAll::StrExactlyContext(x) => { + let s = x + .String() + .and_then(|x| 
analyze_string(&x.symbol.text, y, z)); + if s.is_none() { + diagnostic!(y, Error, TypeParseError, "invalid string literal"); + } + Ok(meta::pattern::Value::String(s)) + } + PatternMiscContextAll::DtAnyContext(_) => Ok(meta::pattern::Value::DataType(None)), + PatternMiscContextAll::FunctionContext(x) => { + let function = x + .Identifier() + .and_then(|x| meta::Function::try_from(x.symbol.text.as_ref()).ok()); + if function.is_none() { + diagnostic!(y, Error, TypeParseError, "unknown function"); + } + let arguments = antlr_children!(x, y, argument, analyze_pattern, z) + .1 + .into_iter() + .map(|x| x.unwrap_or_default()) + .collect(); + Ok(meta::pattern::Value::Function( + function.unwrap_or_default(), + arguments, + )) + } + PatternMiscContextAll::DatatypeBindingOrConstantContext(x) => analyze_dtbc(x, y, z), + PatternMiscContextAll::InconsistentContext(x) => { + let name = x + .Identifier() + .map(|x| x.symbol.text.to_string()) + .unwrap_or_else(|| "!".to_string()); + + // Check that the name actually maps to a binding. + match z.resolve_pattern(std::iter::once(&name), y) { + PatternObject::NamedBinding(_) => (), + PatternObject::EnumVariant(x) => { + diagnostic!( + y, + Error, + TypeDerivationInvalid, + "{name} cannot be used as a binding; it maps to enum variant {x}" + ); + } + PatternObject::TypeClass(x) => { + diagnostic!( + y, + Error, + TypeDerivationInvalid, + "{name} cannot be used as a binding; it maps to type class {x}" + ); + } + } + + let nullability = x.nullability().map(|nullability| { + Arc::new(match nullability.as_ref() { + NullabilityContextAll::NullableContext(_) => { + meta::pattern::Value::Boolean(Some(true)) + } + NullabilityContextAll::NonNullableContext(_) => { + meta::pattern::Value::Boolean(Some(false)) + } + NullabilityContextAll::NullableIfContext(x) => { + antlr_child!(x, y, nullability, 0, analyze_pattern, z) + .1 + .unwrap_or_default() + } + NullabilityContextAll::Error(_) => meta::pattern::Value::Unresolved, + }) + }); + Ok(meta::pattern::Value::Binding(meta::pattern::Binding { + name, + inconsistent: true, + nullability, + })) + } + PatternMiscContextAll::Error(_) => Ok(meta::pattern::Value::Unresolved), + } +} + +/// Analyzes a set of zero or more a*b or a/b expressions. +fn analyze_pattern_mul_div( + x: &PatternMulDivContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + antlr_reduce_left_recursion!( + x, y, z, PatternMulDivContextAll, + patternMisc_all, analyze_pattern_misc, operatorMulDiv, + { + OperatorMulDivContextAll::MulContext(_) => meta::Function::Multiply, + OperatorMulDivContextAll::DivContext(_) => meta::Function::Divide, + OperatorMulDivContextAll::Error(_) => meta::Function::Unresolved, + } + ) +} + +/// Analyzes a set of zero or more a+b or a-b expressions. +fn analyze_pattern_add_sub( + x: &PatternAddSubContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + antlr_reduce_left_recursion!( + x, y, z, PatternAddSubContextAll, + patternMulDiv_all, analyze_pattern_mul_div, operatorAddSub, + { + OperatorAddSubContextAll::AddContext(_) => meta::Function::Add, + OperatorAddSubContextAll::SubContext(_) => meta::Function::Subtract, + OperatorAddSubContextAll::Error(_) => meta::Function::Unresolved, + } + ) +} + +/// Analyzes a set of zero or more integer inequality expressions. 
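+///
+/// Like the other binary-operator analyzers, this folds the operand list
+/// produced by the left-recursion-avoiding grammar rule into a left-deep
+/// tree of function patterns; for instance, `a - b + c` yields an Add node
+/// whose left operand is the Subtract subtree for `a - b`.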
+fn analyze_pattern_ineq( + x: &PatternIneqContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + antlr_reduce_left_recursion!( + x, y, z, PatternIneqContextAll, + patternAddSub_all, analyze_pattern_add_sub, operatorIneq, + { + OperatorIneqContextAll::LtContext(_) => meta::Function::LessThan, + OperatorIneqContextAll::LeContext(_) => meta::Function::LessEqual, + OperatorIneqContextAll::GtContext(_) => meta::Function::GreaterThan, + OperatorIneqContextAll::GeContext(_) => meta::Function::GreaterEqual, + OperatorIneqContextAll::Error(_) => meta::Function::Unresolved, + } + ) +} + +/// Analyzes a set of zero or more x==y or x!=y expressions. +fn analyze_pattern_eq_neq( + x: &PatternEqNeqContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + antlr_reduce_left_recursion!( + x, y, z, PatternEqNeqContextAll, + patternIneq_all, analyze_pattern_ineq, operatorEqNeq, + { + OperatorEqNeqContextAll::EqContext(_) => meta::Function::Equal, + OperatorEqNeqContextAll::NeqContext(_) => meta::Function::NotEqual, + OperatorEqNeqContextAll::Error(_) => meta::Function::Unresolved, + } + ) +} + +/// Analyzes a set of zero or more x&&y expressions. +fn analyze_pattern_and( + x: &PatternAndContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + antlr_reduce_left_recursion!( + x, y, z, PatternAndContextAll, + patternEqNeq_all, analyze_pattern_eq_neq, operatorAnd, + { + OperatorAndContextAll::AndContext(_) => meta::Function::And, + OperatorAndContextAll::Error(_) => meta::Function::Unresolved, + } + ) +} + +/// Analyzes a set of zero or more x||y expressions. +fn analyze_pattern_or( + x: &PatternOrContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + antlr_reduce_left_recursion!( + x, y, z, PatternOrContextAll, + patternAnd_all, analyze_pattern_and, operatorOr, + { + OperatorOrContextAll::OrContext(_) => meta::Function::Or, + OperatorOrContextAll::Error(_) => meta::Function::Unresolved, + } + ) +} + +/// Analyzes a pattern parse tree node. +fn analyze_pattern( + x: &PatternContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + Ok(antlr_hidden_child!(x, y, 0, analyze_pattern_or, z).unwrap_or_default()) +} + +/// Analyzes a statement parse tree node. +fn analyze_statement( + x: &StatementContextAll, + y: &mut context::Context, + z: &mut AnalysisContext, +) -> Result { + match x { + StatementContextAll::AssertContext(x) => { + let rhs_expression = antlr_child!(x, y, rhs, 0, analyze_pattern, z) + .1 + .unwrap_or_default(); + Ok(meta::program::Statement { + lhs_pattern: meta::pattern::Value::Boolean(Some(true)), + rhs_expression, + }) + } + StatementContextAll::NormalContext(x) => { + let rhs_expression = antlr_child!(x, y, rhs, 1, analyze_pattern, z) + .1 + .unwrap_or_default(); + let lhs_pattern = antlr_child!(x, y, lhs, 0, analyze_pattern, z) + .1 + .unwrap_or_default(); + Ok(meta::program::Statement { + lhs_pattern, + rhs_expression, + }) + } + StatementContextAll::MatchContext(x) => { + let rhs_expression = antlr_child!(x, y, rhs, 0, analyze_pattern, z) + .1 + .unwrap_or_default(); + let lhs_pattern = antlr_child!(x, y, lhs, 1, analyze_pattern, z) + .1 + .unwrap_or_default(); + Ok(meta::program::Statement { + lhs_pattern, + rhs_expression, + }) + } + StatementContextAll::Error(_) => Ok(meta::program::Statement::default()), + } +} + +/// Analyzes a program parse tree node. 
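+///
+/// A program consists of zero or more statements (assignments, matches, and
+/// assertions) followed by a single final pattern; the two parts map onto
+/// the statements and expression fields of the resulting [meta::Program].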
+fn analyze_program(
+    x: &ProgramContextAll,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<meta::Program> {
+    let statements = antlr_children!(x, y, statement, analyze_statement, z)
+        .1
+        .into_iter()
+        .map(|x| x.unwrap_or_default())
+        .collect();
+    let expression = antlr_child!(x, y, pattern, 0, analyze_pattern, z)
+        .1
+        .unwrap_or_default();
+    Ok(meta::Program {
+        statements,
+        expression,
+    })
+}
+
+/// Parse a string as just the class part of a data type.
+pub fn parse_class(
+    x: &str,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<data::Class> {
+    // Only accept type classes.
+    if let PatternObject::TypeClass(class) = z.resolve_pattern(x.split('.'), y) {
+        Ok(class)
+    } else {
+        Err(cause!(
+            TypeResolutionError,
+            "could not resolve {x} as a type class"
+        ))
+    }
+}
+
+/// Parse a string as a complete type.
+pub fn parse_type(
+    x: &str,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<data::Type> {
+    let pattern = parse_pattern(x, y, z)?;
+    let value = pattern.evaluate()?;
+    value.get_data_type().ok_or_else(|| {
+        cause!(
+            TypeDerivationInvalid,
+            "expected a data type, but received a pattern that evaluated to {value}"
+        )
+    })
+}
+
+/// Parse a string as a meta-pattern.
+pub fn parse_pattern(
+    x: &str,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<meta::pattern::Value> {
+    let x = antlr_parse!(x, y, startPattern)?;
+    let result = antlr_child!(x.as_ref(), y, pattern, 0, analyze_pattern, z)
+        .1
+        .unwrap_or_default();
+    Ok(result)
+}
+
+/// Parse a string as a meta-program.
+pub fn parse_program(
+    x: &str,
+    y: &mut context::Context,
+    z: &mut AnalysisContext,
+) -> Result<meta::Program> {
+    let x = antlr_parse!(x, y, startProgram)?;
+    let result = antlr_child!(x.as_ref(), y, program, 0, analyze_program, z)
+        .1
+        .unwrap_or_default();
+    Ok(result)
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use crate::output::tree;
+
+    #[test]
+    fn test() {
+        let mut node = tree::Node::from(tree::NodeType::ProtoMessage("test"));
+        let mut state = Default::default();
+        let config = crate::Config::new();
+        let mut context = context::Context::new("test", &mut node, &mut state, &config);
+        let mut analysis_context = AnalysisContext::new(None);
+
+        /*let result = parse_program(
+            r#"init_scale = max(S1,S2)
+            init_prec = init_scale + max(P1 - S1, P2 - S2) + 1
+            min_scale = min(init_scale, 6)
+            delta = init_prec - 38
+            prec = min(init_prec, 38)
+            scale_after_borrow = max(init_scale - delta, min_scale)
+            scale = if init_prec > 38 then scale_after_borrow else init_scale
+            DECIMAL<prec, scale>"#,
+            &mut context,
+        )
+        .ok();*/
+
+        let result = parse_program(r#"1 + 2 * 3 - 4 / 5"#, &mut context, &mut analysis_context)
+            .unwrap_or_default();
+
+        let mut eval_context = meta::Context::default();
+        assert_eq!(
+            result.evaluate(&mut eval_context),
+            Ok(meta::Value::Integer(7))
+        );
+        //panic!("{:#?}", _result);
+        //panic!("{node:#?}");
+        //panic!("{:#?}", _result.to_string_tree(&*parser));
+    }
+}
diff --git a/rs/src/parse/extensions/simple/derivations/substraittypelexer.rs b/rs/src/parse/extensions/simple/derivations/substraittypelexer.rs
new file mode 100644
index 00000000..d9365a4c
--- /dev/null
+++ b/rs/src/parse/extensions/simple/derivations/substraittypelexer.rs
@@ -0,0 +1,494 @@
+// SPDX-License-Identifier: Apache-2.0
+#![allow(clippy::all)]
+#![cfg_attr(rustfmt, rustfmt_skip)]
+// Generated from SubstraitType.g4 by ANTLR 4.8
+#![allow(dead_code)]
+#![allow(nonstandard_style)]
+#![allow(unused_imports)]
+#![allow(unused_variables)]
+use antlr_rust::atn::ATN;
+use antlr_rust::char_stream::CharStream;
+use antlr_rust::int_stream::IntStream; +use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog}; +use antlr_rust::atn_deserializer::ATNDeserializer; +use antlr_rust::dfa::DFA; +use antlr_rust::lexer_atn_simulator::{LexerATNSimulator, ILexerATNSimulator}; +use antlr_rust::PredictionContextCache; +use antlr_rust::recognizer::{Recognizer,Actions}; +use antlr_rust::error_listener::ErrorListener; +use antlr_rust::TokenSource; +use antlr_rust::token_factory::{TokenFactory,CommonTokenFactory,TokenAware}; +use antlr_rust::token::*; +use antlr_rust::rule_context::{BaseRuleContext,EmptyCustomRuleContext,EmptyContext}; +use antlr_rust::parser_rule_context::{ParserRuleContext,BaseParserRuleContext,cast}; +use antlr_rust::vocabulary::{Vocabulary,VocabularyImpl}; + +use antlr_rust::{lazy_static,Tid,TidAble,TidExt}; + +use std::sync::Arc; +use std::cell::RefCell; +use std::rc::Rc; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; + + + pub const LineComment:isize=1; + pub const BlockComment:isize=2; + pub const Whitespace:isize=3; + pub const Newline:isize=4; + pub const EscNewline:isize=5; + pub const Assert:isize=6; + pub const Matches:isize=7; + pub const If:isize=8; + pub const Then:isize=9; + pub const Else:isize=10; + pub const Null:isize=11; + pub const True:isize=12; + pub const False:isize=13; + pub const Metabool:isize=14; + pub const Metaint:isize=15; + pub const Metaenum:isize=16; + pub const Metastr:isize=17; + pub const Typename:isize=18; + pub const Period:isize=19; + pub const Comma:isize=20; + pub const Colon:isize=21; + pub const Semicolon:isize=22; + pub const Question:isize=23; + pub const Bang:isize=24; + pub const OpenParen:isize=25; + pub const CloseParen:isize=26; + pub const OpenCurly:isize=27; + pub const CloseCurly:isize=28; + pub const OpenSquare:isize=29; + pub const CloseSquare:isize=30; + pub const Assign:isize=31; + pub const BooleanOr:isize=32; + pub const BooleanAnd:isize=33; + pub const Equal:isize=34; + pub const NotEqual:isize=35; + pub const LessThan:isize=36; + pub const LessEqual:isize=37; + pub const GreaterThan:isize=38; + pub const GreaterEqual:isize=39; + pub const Plus:isize=40; + pub const Minus:isize=41; + pub const Multiply:isize=42; + pub const Divide:isize=43; + pub const Range:isize=44; + pub const Nonzero:isize=45; + pub const Zero:isize=46; + pub const String:isize=47; + pub const Identifier:isize=48; + pub const channelNames: [&'static str;0+2] = [ + "DEFAULT_TOKEN_CHANNEL", "HIDDEN" + ]; + + pub const modeNames: [&'static str;1] = [ + "DEFAULT_MODE" + ]; + + pub const ruleNames: [&'static str;74] = [ + "LineComment", "BlockComment", "Whitespace", "Newline", "EscNewline", + "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", + "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "Assert", + "Matches", "If", "Then", "Else", "Null", "True", "False", "Metabool", + "Metaint", "Metaenum", "Metastr", "Typename", "Period", "Comma", "Colon", + "Semicolon", "Question", "Bang", "OpenParen", "CloseParen", "OpenCurly", + "CloseCurly", "OpenSquare", "CloseSquare", "Assign", "BooleanOr", "BooleanAnd", + "Equal", "NotEqual", "LessThan", "LessEqual", "GreaterThan", "GreaterEqual", + "Plus", "Minus", "Multiply", "Divide", "Range", "Nonzero", "Zero", "String", + "Identifier" + ]; + + + pub const _LITERAL_NAMES: [Option<&'static str>;47] = [ + None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, Some("'.'"), Some("','"), Some("':'"), + Some("';'"), Some("'?'"), 
Some("'!'"), Some("'('"), Some("')'"), Some("'{'"), + Some("'}'"), Some("'['"), Some("']'"), Some("'='"), Some("'||'"), Some("'&&'"), + Some("'=='"), Some("'!='"), Some("'<'"), Some("'<='"), Some("'>'"), Some("'>='"), + Some("'+'"), Some("'-'"), Some("'*'"), Some("'/'"), Some("'..'"), None, + Some("'0'") + ]; + pub const _SYMBOLIC_NAMES: [Option<&'static str>;49] = [ + None, Some("LineComment"), Some("BlockComment"), Some("Whitespace"), Some("Newline"), + Some("EscNewline"), Some("Assert"), Some("Matches"), Some("If"), Some("Then"), + Some("Else"), Some("Null"), Some("True"), Some("False"), Some("Metabool"), + Some("Metaint"), Some("Metaenum"), Some("Metastr"), Some("Typename"), + Some("Period"), Some("Comma"), Some("Colon"), Some("Semicolon"), Some("Question"), + Some("Bang"), Some("OpenParen"), Some("CloseParen"), Some("OpenCurly"), + Some("CloseCurly"), Some("OpenSquare"), Some("CloseSquare"), Some("Assign"), + Some("BooleanOr"), Some("BooleanAnd"), Some("Equal"), Some("NotEqual"), + Some("LessThan"), Some("LessEqual"), Some("GreaterThan"), Some("GreaterEqual"), + Some("Plus"), Some("Minus"), Some("Multiply"), Some("Divide"), Some("Range"), + Some("Nonzero"), Some("Zero"), Some("String"), Some("Identifier") + ]; + lazy_static!{ + static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); + static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); + } + + +pub type LexerContext<'input> = BaseRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>; +pub type LocalTokenFactory<'input> = CommonTokenFactory; + +type From<'a> = as TokenFactory<'a> >::From; + +pub struct SubstraitTypeLexer<'input, Input:CharStream >> { + base: BaseLexer<'input,SubstraitTypeLexerActions,Input,LocalTokenFactory<'input>>, +} + +antlr_rust::tid! 
{ impl<'input,Input> TidAble<'input> for SubstraitTypeLexer<'input,Input> where Input:CharStream > } + +impl<'input, Input:CharStream >> Deref for SubstraitTypeLexer<'input,Input>{ + type Target = BaseLexer<'input,SubstraitTypeLexerActions,Input,LocalTokenFactory<'input>>; + + fn deref(&self) -> &Self::Target { + &self.base + } +} + +impl<'input, Input:CharStream >> DerefMut for SubstraitTypeLexer<'input,Input>{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.base + } +} + + +impl<'input, Input:CharStream >> SubstraitTypeLexer<'input,Input>{ + fn get_rule_names(&self) -> &'static [&'static str] { + &ruleNames + } + fn get_literal_names(&self) -> &[Option<&str>] { + &_LITERAL_NAMES + } + + fn get_symbolic_names(&self) -> &[Option<&str>] { + &_SYMBOLIC_NAMES + } + + fn get_grammar_file_name(&self) -> &'static str { + "SubstraitTypeLexer.g4" + } + + pub fn new_with_token_factory(input: Input, tf: &'input LocalTokenFactory<'input>) -> Self { + antlr_rust::recognizer::check_version("0","3"); + Self { + base: BaseLexer::new_base_lexer( + input, + LexerATNSimulator::new_lexer_atnsimulator( + _ATN.clone(), + _decision_to_DFA.clone(), + _shared_context_cache.clone(), + ), + SubstraitTypeLexerActions{}, + tf + ) + } + } +} + +impl<'input, Input:CharStream >> SubstraitTypeLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{ + pub fn new(input: Input) -> Self{ + SubstraitTypeLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default()) + } +} + +pub struct SubstraitTypeLexerActions { +} + +impl SubstraitTypeLexerActions{ +} + +impl<'input, Input:CharStream >> Actions<'input,BaseLexer<'input,SubstraitTypeLexerActions,Input,LocalTokenFactory<'input>>> for SubstraitTypeLexerActions{ + } + + impl<'input, Input:CharStream >> SubstraitTypeLexer<'input,Input>{ + +} + +impl<'input, Input:CharStream >> LexerRecog<'input,BaseLexer<'input,SubstraitTypeLexerActions,Input,LocalTokenFactory<'input>>> for SubstraitTypeLexerActions{ +} +impl<'input> TokenAware<'input> for SubstraitTypeLexerActions{ + type TF = LocalTokenFactory<'input>; +} + +impl<'input, Input:CharStream >> TokenSource<'input> for SubstraitTypeLexer<'input,Input>{ + type TF = LocalTokenFactory<'input>; + + fn next_token(&mut self) -> >::Tok { + self.base.next_token() + } + + fn get_line(&self) -> isize { + self.base.get_line() + } + + fn get_char_position_in_line(&self) -> isize { + self.base.get_char_position_in_line() + } + + fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { + self.base.get_input_stream() + } + + fn get_source_name(&self) -> String { + self.base.get_source_name() + } + + fn get_token_factory(&self) -> &'input Self::TF { + self.base.get_token_factory() + } +} + + + + lazy_static! 
{ + static ref _ATN: Arc = + Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); + static ref _decision_to_DFA: Arc>> = { + let mut dfa = Vec::new(); + let size = _ATN.decision_to_state.len(); + for i in 0..size { + dfa.push(DFA::new( + _ATN.clone(), + _ATN.get_decision_state(i), + i as isize, + ).into()) + } + Arc::new(dfa) + }; + } + + + + const _serializedATN:&'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\ + \x32\u{1a7}\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x04\ + \x05\x09\x05\x04\x06\x09\x06\x04\x07\x09\x07\x04\x08\x09\x08\x04\x09\x09\ + \x09\x04\x0a\x09\x0a\x04\x0b\x09\x0b\x04\x0c\x09\x0c\x04\x0d\x09\x0d\x04\ + \x0e\x09\x0e\x04\x0f\x09\x0f\x04\x10\x09\x10\x04\x11\x09\x11\x04\x12\x09\ + \x12\x04\x13\x09\x13\x04\x14\x09\x14\x04\x15\x09\x15\x04\x16\x09\x16\x04\ + \x17\x09\x17\x04\x18\x09\x18\x04\x19\x09\x19\x04\x1a\x09\x1a\x04\x1b\x09\ + \x1b\x04\x1c\x09\x1c\x04\x1d\x09\x1d\x04\x1e\x09\x1e\x04\x1f\x09\x1f\x04\ + \x20\x09\x20\x04\x21\x09\x21\x04\x22\x09\x22\x04\x23\x09\x23\x04\x24\x09\ + \x24\x04\x25\x09\x25\x04\x26\x09\x26\x04\x27\x09\x27\x04\x28\x09\x28\x04\ + \x29\x09\x29\x04\x2a\x09\x2a\x04\x2b\x09\x2b\x04\x2c\x09\x2c\x04\x2d\x09\ + \x2d\x04\x2e\x09\x2e\x04\x2f\x09\x2f\x04\x30\x09\x30\x04\x31\x09\x31\x04\ + \x32\x09\x32\x04\x33\x09\x33\x04\x34\x09\x34\x04\x35\x09\x35\x04\x36\x09\ + \x36\x04\x37\x09\x37\x04\x38\x09\x38\x04\x39\x09\x39\x04\x3a\x09\x3a\x04\ + \x3b\x09\x3b\x04\x3c\x09\x3c\x04\x3d\x09\x3d\x04\x3e\x09\x3e\x04\x3f\x09\ + \x3f\x04\x40\x09\x40\x04\x41\x09\x41\x04\x42\x09\x42\x04\x43\x09\x43\x04\ + \x44\x09\x44\x04\x45\x09\x45\x04\x46\x09\x46\x04\x47\x09\x47\x04\x48\x09\ + \x48\x04\x49\x09\x49\x04\x4a\x09\x4a\x04\x4b\x09\x4b\x03\x02\x03\x02\x03\ + \x02\x03\x02\x07\x02\u{9c}\x0a\x02\x0c\x02\x0e\x02\u{9f}\x0b\x02\x03\x02\ + \x03\x02\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x06\x03\u{a8}\x0a\x03\ + \x0d\x03\x0e\x03\u{a9}\x03\x03\x05\x03\u{ad}\x0a\x03\x03\x03\x07\x03\u{b0}\ + \x0a\x03\x0c\x03\x0e\x03\u{b3}\x0b\x03\x03\x03\x03\x03\x03\x03\x03\x03\ + \x03\x03\x03\x04\x06\x04\u{bb}\x0a\x04\x0d\x04\x0e\x04\u{bc}\x03\x04\x03\ + \x04\x03\x05\x06\x05\u{c2}\x0a\x05\x0d\x05\x0e\x05\u{c3}\x03\x06\x03\x06\ + \x06\x06\u{c8}\x0a\x06\x0d\x06\x0e\x06\u{c9}\x03\x06\x03\x06\x03\x07\x03\ + \x07\x03\x08\x03\x08\x03\x09\x03\x09\x03\x0a\x03\x0a\x03\x0b\x03\x0b\x03\ + \x0c\x03\x0c\x03\x0d\x03\x0d\x03\x0e\x03\x0e\x03\x0f\x03\x0f\x03\x10\x03\ + \x10\x03\x11\x03\x11\x03\x12\x03\x12\x03\x13\x03\x13\x03\x14\x03\x14\x03\ + \x15\x03\x15\x03\x16\x03\x16\x03\x17\x03\x17\x03\x18\x03\x18\x03\x19\x03\ + \x19\x03\x1a\x03\x1a\x03\x1b\x03\x1b\x03\x1c\x03\x1c\x03\x1d\x03\x1d\x03\ + \x1e\x03\x1e\x03\x1f\x03\x1f\x03\x20\x03\x20\x03\x21\x03\x21\x03\x21\x03\ + \x21\x03\x21\x03\x21\x03\x21\x03\x22\x03\x22\x03\x22\x03\x22\x03\x22\x03\ + \x22\x03\x22\x03\x22\x03\x23\x03\x23\x03\x23\x03\x24\x03\x24\x03\x24\x03\ + \x24\x03\x24\x03\x25\x03\x25\x03\x25\x03\x25\x03\x25\x03\x26\x03\x26\x03\ + \x26\x03\x26\x03\x26\x03\x27\x03\x27\x03\x27\x03\x27\x03\x27\x03\x28\x03\ + \x28\x03\x28\x03\x28\x03\x28\x03\x28\x03\x29\x03\x29\x03\x29\x03\x29\x03\ + \x29\x03\x29\x03\x29\x03\x29\x03\x29\x03\x2a\x03\x2a\x03\x2a\x03\x2a\x03\ + \x2a\x03\x2a\x03\x2a\x03\x2a\x03\x2b\x03\x2b\x03\x2b\x03\x2b\x03\x2b\x03\ + \x2b\x03\x2b\x03\x2b\x03\x2b\x03\x2c\x03\x2c\x03\x2c\x03\x2c\x03\x2c\x03\ + \x2c\x03\x2c\x03\x2c\x03\x2d\x03\x2d\x03\x2d\x03\x2d\x03\x2d\x03\x2d\x03\ + \x2d\x03\x2d\x03\x2d\x03\x2e\x03\x2e\x03\x2f\x03\x2f\x03\x30\x03\x30\x03\ + 
\x31\x03\x31\x03\x32\x03\x32\x03\x33\x03\x33\x03\x34\x03\x34\x03\x35\x03\ + \x35\x03\x36\x03\x36\x03\x37\x03\x37\x03\x38\x03\x38\x03\x39\x03\x39\x03\ + \x3a\x03\x3a\x03\x3b\x03\x3b\x03\x3b\x03\x3c\x03\x3c\x03\x3c\x03\x3d\x03\ + \x3d\x03\x3d\x03\x3e\x03\x3e\x03\x3e\x03\x3f\x03\x3f\x03\x40\x03\x40\x03\ + \x40\x03\x41\x03\x41\x03\x42\x03\x42\x03\x42\x03\x43\x03\x43\x03\x44\x03\ + \x44\x03\x45\x03\x45\x03\x46\x03\x46\x03\x47\x03\x47\x03\x47\x03\x48\x03\ + \x48\x07\x48\u{196}\x0a\x48\x0c\x48\x0e\x48\u{199}\x0b\x48\x03\x49\x03\ + \x49\x03\x4a\x03\x4a\x03\x4a\x03\x4a\x03\x4b\x03\x4b\x07\x4b\u{1a3}\x0a\ + \x4b\x0c\x4b\x0e\x4b\u{1a6}\x0b\x4b\x02\x02\x4c\x03\x03\x05\x04\x07\x05\ + \x09\x06\x0b\x07\x0d\x02\x0f\x02\x11\x02\x13\x02\x15\x02\x17\x02\x19\x02\ + \x1b\x02\x1d\x02\x1f\x02\x21\x02\x23\x02\x25\x02\x27\x02\x29\x02\x2b\x02\ + \x2d\x02\x2f\x02\x31\x02\x33\x02\x35\x02\x37\x02\x39\x02\x3b\x02\x3d\x02\ + \x3f\x02\x41\x08\x43\x09\x45\x0a\x47\x0b\x49\x0c\x4b\x0d\x4d\x0e\x4f\x0f\ + \x51\x10\x53\x11\x55\x12\x57\x13\x59\x14\x5b\x15\x5d\x16\x5f\x17\x61\x18\ + \x63\x19\x65\x1a\x67\x1b\x69\x1c\x6b\x1d\x6d\x1e\x6f\x1f\x71\x20\x73\x21\ + \x75\x22\x77\x23\x79\x24\x7b\x25\x7d\x26\x7f\x27\u{81}\x28\u{83}\x29\u{85}\ + \x2a\u{87}\x2b\u{89}\x2c\u{8b}\x2d\u{8d}\x2e\u{8f}\x2f\u{91}\x30\u{93}\ + \x31\u{95}\x32\x03\x02\x25\x04\x02\x0c\x0c\x0f\x0f\x03\x02\x2c\x2c\x04\ + \x02\x2c\x2c\x31\x31\x04\x02\x0b\x0b\x22\x22\x04\x02\x43\x43\x63\x63\x04\ + \x02\x44\x44\x64\x64\x04\x02\x45\x45\x65\x65\x04\x02\x46\x46\x66\x66\x04\ + \x02\x47\x47\x67\x67\x04\x02\x48\x48\x68\x68\x04\x02\x49\x49\x69\x69\x04\ + \x02\x4a\x4a\x6a\x6a\x04\x02\x4b\x4b\x6b\x6b\x04\x02\x4c\x4c\x6c\x6c\x04\ + \x02\x4d\x4d\x6d\x6d\x04\x02\x4e\x4e\x6e\x6e\x04\x02\x4f\x4f\x6f\x6f\x04\ + \x02\x50\x50\x70\x70\x04\x02\x51\x51\x71\x71\x04\x02\x52\x52\x72\x72\x04\ + \x02\x53\x53\x73\x73\x04\x02\x54\x54\x74\x74\x04\x02\x55\x55\x75\x75\x04\ + \x02\x56\x56\x76\x76\x04\x02\x57\x57\x77\x77\x04\x02\x58\x58\x78\x78\x04\ + \x02\x59\x59\x79\x79\x04\x02\x5a\x5a\x7a\x7a\x04\x02\x5b\x5b\x7b\x7b\x04\ + \x02\x5c\x5c\x7c\x7c\x03\x02\x33\x3b\x03\x02\x32\x3b\x03\x02\x24\x24\x06\ + \x02\x26\x26\x43\x5c\x61\x61\x63\x7c\x07\x02\x26\x26\x32\x3b\x43\x5c\x61\ + \x61\x63\x7c\x02\u{195}\x02\x03\x03\x02\x02\x02\x02\x05\x03\x02\x02\x02\ + \x02\x07\x03\x02\x02\x02\x02\x09\x03\x02\x02\x02\x02\x0b\x03\x02\x02\x02\ + \x02\x41\x03\x02\x02\x02\x02\x43\x03\x02\x02\x02\x02\x45\x03\x02\x02\x02\ + \x02\x47\x03\x02\x02\x02\x02\x49\x03\x02\x02\x02\x02\x4b\x03\x02\x02\x02\ + \x02\x4d\x03\x02\x02\x02\x02\x4f\x03\x02\x02\x02\x02\x51\x03\x02\x02\x02\ + \x02\x53\x03\x02\x02\x02\x02\x55\x03\x02\x02\x02\x02\x57\x03\x02\x02\x02\ + \x02\x59\x03\x02\x02\x02\x02\x5b\x03\x02\x02\x02\x02\x5d\x03\x02\x02\x02\ + \x02\x5f\x03\x02\x02\x02\x02\x61\x03\x02\x02\x02\x02\x63\x03\x02\x02\x02\ + \x02\x65\x03\x02\x02\x02\x02\x67\x03\x02\x02\x02\x02\x69\x03\x02\x02\x02\ + \x02\x6b\x03\x02\x02\x02\x02\x6d\x03\x02\x02\x02\x02\x6f\x03\x02\x02\x02\ + \x02\x71\x03\x02\x02\x02\x02\x73\x03\x02\x02\x02\x02\x75\x03\x02\x02\x02\ + \x02\x77\x03\x02\x02\x02\x02\x79\x03\x02\x02\x02\x02\x7b\x03\x02\x02\x02\ + \x02\x7d\x03\x02\x02\x02\x02\x7f\x03\x02\x02\x02\x02\u{81}\x03\x02\x02\ + \x02\x02\u{83}\x03\x02\x02\x02\x02\u{85}\x03\x02\x02\x02\x02\u{87}\x03\ + \x02\x02\x02\x02\u{89}\x03\x02\x02\x02\x02\u{8b}\x03\x02\x02\x02\x02\u{8d}\ + \x03\x02\x02\x02\x02\u{8f}\x03\x02\x02\x02\x02\u{91}\x03\x02\x02\x02\x02\ + \u{93}\x03\x02\x02\x02\x02\u{95}\x03\x02\x02\x02\x03\u{97}\x03\x02\x02\ + 
\x02\x05\u{a2}\x03\x02\x02\x02\x07\u{ba}\x03\x02\x02\x02\x09\u{c1}\x03\ + \x02\x02\x02\x0b\u{c5}\x03\x02\x02\x02\x0d\u{cd}\x03\x02\x02\x02\x0f\u{cf}\ + \x03\x02\x02\x02\x11\u{d1}\x03\x02\x02\x02\x13\u{d3}\x03\x02\x02\x02\x15\ + \u{d5}\x03\x02\x02\x02\x17\u{d7}\x03\x02\x02\x02\x19\u{d9}\x03\x02\x02\ + \x02\x1b\u{db}\x03\x02\x02\x02\x1d\u{dd}\x03\x02\x02\x02\x1f\u{df}\x03\ + \x02\x02\x02\x21\u{e1}\x03\x02\x02\x02\x23\u{e3}\x03\x02\x02\x02\x25\u{e5}\ + \x03\x02\x02\x02\x27\u{e7}\x03\x02\x02\x02\x29\u{e9}\x03\x02\x02\x02\x2b\ + \u{eb}\x03\x02\x02\x02\x2d\u{ed}\x03\x02\x02\x02\x2f\u{ef}\x03\x02\x02\ + \x02\x31\u{f1}\x03\x02\x02\x02\x33\u{f3}\x03\x02\x02\x02\x35\u{f5}\x03\ + \x02\x02\x02\x37\u{f7}\x03\x02\x02\x02\x39\u{f9}\x03\x02\x02\x02\x3b\u{fb}\ + \x03\x02\x02\x02\x3d\u{fd}\x03\x02\x02\x02\x3f\u{ff}\x03\x02\x02\x02\x41\ + \u{101}\x03\x02\x02\x02\x43\u{108}\x03\x02\x02\x02\x45\u{110}\x03\x02\x02\ + \x02\x47\u{113}\x03\x02\x02\x02\x49\u{118}\x03\x02\x02\x02\x4b\u{11d}\x03\ + \x02\x02\x02\x4d\u{122}\x03\x02\x02\x02\x4f\u{127}\x03\x02\x02\x02\x51\ + \u{12d}\x03\x02\x02\x02\x53\u{136}\x03\x02\x02\x02\x55\u{13e}\x03\x02\x02\ + \x02\x57\u{147}\x03\x02\x02\x02\x59\u{14f}\x03\x02\x02\x02\x5b\u{158}\x03\ + \x02\x02\x02\x5d\u{15a}\x03\x02\x02\x02\x5f\u{15c}\x03\x02\x02\x02\x61\ + \u{15e}\x03\x02\x02\x02\x63\u{160}\x03\x02\x02\x02\x65\u{162}\x03\x02\x02\ + \x02\x67\u{164}\x03\x02\x02\x02\x69\u{166}\x03\x02\x02\x02\x6b\u{168}\x03\ + \x02\x02\x02\x6d\u{16a}\x03\x02\x02\x02\x6f\u{16c}\x03\x02\x02\x02\x71\ + \u{16e}\x03\x02\x02\x02\x73\u{170}\x03\x02\x02\x02\x75\u{172}\x03\x02\x02\ + \x02\x77\u{175}\x03\x02\x02\x02\x79\u{178}\x03\x02\x02\x02\x7b\u{17b}\x03\ + \x02\x02\x02\x7d\u{17e}\x03\x02\x02\x02\x7f\u{180}\x03\x02\x02\x02\u{81}\ + \u{183}\x03\x02\x02\x02\u{83}\u{185}\x03\x02\x02\x02\u{85}\u{188}\x03\x02\ + \x02\x02\u{87}\u{18a}\x03\x02\x02\x02\u{89}\u{18c}\x03\x02\x02\x02\u{8b}\ + \u{18e}\x03\x02\x02\x02\u{8d}\u{190}\x03\x02\x02\x02\u{8f}\u{193}\x03\x02\ + \x02\x02\u{91}\u{19a}\x03\x02\x02\x02\u{93}\u{19c}\x03\x02\x02\x02\u{95}\ + \u{1a0}\x03\x02\x02\x02\u{97}\u{98}\x07\x31\x02\x02\u{98}\u{99}\x07\x31\ + \x02\x02\u{99}\u{9d}\x03\x02\x02\x02\u{9a}\u{9c}\x0a\x02\x02\x02\u{9b}\ + \u{9a}\x03\x02\x02\x02\u{9c}\u{9f}\x03\x02\x02\x02\u{9d}\u{9b}\x03\x02\ + \x02\x02\u{9d}\u{9e}\x03\x02\x02\x02\u{9e}\u{a0}\x03\x02\x02\x02\u{9f}\ + \u{9d}\x03\x02\x02\x02\u{a0}\u{a1}\x08\x02\x02\x02\u{a1}\x04\x03\x02\x02\ + \x02\u{a2}\u{a3}\x07\x31\x02\x02\u{a3}\u{a4}\x07\x2c\x02\x02\u{a4}\u{ac}\ + \x03\x02\x02\x02\u{a5}\u{ad}\x0a\x03\x02\x02\u{a6}\u{a8}\x07\x2c\x02\x02\ + \u{a7}\u{a6}\x03\x02\x02\x02\u{a8}\u{a9}\x03\x02\x02\x02\u{a9}\u{a7}\x03\ + \x02\x02\x02\u{a9}\u{aa}\x03\x02\x02\x02\u{aa}\u{ab}\x03\x02\x02\x02\u{ab}\ + \u{ad}\x0a\x04\x02\x02\u{ac}\u{a5}\x03\x02\x02\x02\u{ac}\u{a7}\x03\x02\ + \x02\x02\u{ad}\u{b1}\x03\x02\x02\x02\u{ae}\u{b0}\x07\x2c\x02\x02\u{af}\ + \u{ae}\x03\x02\x02\x02\u{b0}\u{b3}\x03\x02\x02\x02\u{b1}\u{af}\x03\x02\ + \x02\x02\u{b1}\u{b2}\x03\x02\x02\x02\u{b2}\u{b4}\x03\x02\x02\x02\u{b3}\ + \u{b1}\x03\x02\x02\x02\u{b4}\u{b5}\x07\x2c\x02\x02\u{b5}\u{b6}\x07\x31\ + \x02\x02\u{b6}\u{b7}\x03\x02\x02\x02\u{b7}\u{b8}\x08\x03\x02\x02\u{b8}\ + \x06\x03\x02\x02\x02\u{b9}\u{bb}\x09\x05\x02\x02\u{ba}\u{b9}\x03\x02\x02\ + \x02\u{bb}\u{bc}\x03\x02\x02\x02\u{bc}\u{ba}\x03\x02\x02\x02\u{bc}\u{bd}\ + \x03\x02\x02\x02\u{bd}\u{be}\x03\x02\x02\x02\u{be}\u{bf}\x08\x04\x02\x02\ + \u{bf}\x08\x03\x02\x02\x02\u{c0}\u{c2}\x09\x02\x02\x02\u{c1}\u{c0}\x03\ + \x02\x02\x02\u{c2}\u{c3}\x03\x02\x02\x02\u{c3}\u{c1}\x03\x02\x02\x02\u{c3}\ + 
\u{c4}\x03\x02\x02\x02\u{c4}\x0a\x03\x02\x02\x02\u{c5}\u{c7}\x07\x5e\x02\ + \x02\u{c6}\u{c8}\x09\x02\x02\x02\u{c7}\u{c6}\x03\x02\x02\x02\u{c8}\u{c9}\ + \x03\x02\x02\x02\u{c9}\u{c7}\x03\x02\x02\x02\u{c9}\u{ca}\x03\x02\x02\x02\ + \u{ca}\u{cb}\x03\x02\x02\x02\u{cb}\u{cc}\x08\x06\x02\x02\u{cc}\x0c\x03\ + \x02\x02\x02\u{cd}\u{ce}\x09\x06\x02\x02\u{ce}\x0e\x03\x02\x02\x02\u{cf}\ + \u{d0}\x09\x07\x02\x02\u{d0}\x10\x03\x02\x02\x02\u{d1}\u{d2}\x09\x08\x02\ + \x02\u{d2}\x12\x03\x02\x02\x02\u{d3}\u{d4}\x09\x09\x02\x02\u{d4}\x14\x03\ + \x02\x02\x02\u{d5}\u{d6}\x09\x0a\x02\x02\u{d6}\x16\x03\x02\x02\x02\u{d7}\ + \u{d8}\x09\x0b\x02\x02\u{d8}\x18\x03\x02\x02\x02\u{d9}\u{da}\x09\x0c\x02\ + \x02\u{da}\x1a\x03\x02\x02\x02\u{db}\u{dc}\x09\x0d\x02\x02\u{dc}\x1c\x03\ + \x02\x02\x02\u{dd}\u{de}\x09\x0e\x02\x02\u{de}\x1e\x03\x02\x02\x02\u{df}\ + \u{e0}\x09\x0f\x02\x02\u{e0}\x20\x03\x02\x02\x02\u{e1}\u{e2}\x09\x10\x02\ + \x02\u{e2}\x22\x03\x02\x02\x02\u{e3}\u{e4}\x09\x11\x02\x02\u{e4}\x24\x03\ + \x02\x02\x02\u{e5}\u{e6}\x09\x12\x02\x02\u{e6}\x26\x03\x02\x02\x02\u{e7}\ + \u{e8}\x09\x13\x02\x02\u{e8}\x28\x03\x02\x02\x02\u{e9}\u{ea}\x09\x14\x02\ + \x02\u{ea}\x2a\x03\x02\x02\x02\u{eb}\u{ec}\x09\x15\x02\x02\u{ec}\x2c\x03\ + \x02\x02\x02\u{ed}\u{ee}\x09\x16\x02\x02\u{ee}\x2e\x03\x02\x02\x02\u{ef}\ + \u{f0}\x09\x17\x02\x02\u{f0}\x30\x03\x02\x02\x02\u{f1}\u{f2}\x09\x18\x02\ + \x02\u{f2}\x32\x03\x02\x02\x02\u{f3}\u{f4}\x09\x19\x02\x02\u{f4}\x34\x03\ + \x02\x02\x02\u{f5}\u{f6}\x09\x1a\x02\x02\u{f6}\x36\x03\x02\x02\x02\u{f7}\ + \u{f8}\x09\x1b\x02\x02\u{f8}\x38\x03\x02\x02\x02\u{f9}\u{fa}\x09\x1c\x02\ + \x02\u{fa}\x3a\x03\x02\x02\x02\u{fb}\u{fc}\x09\x1d\x02\x02\u{fc}\x3c\x03\ + \x02\x02\x02\u{fd}\u{fe}\x09\x1e\x02\x02\u{fe}\x3e\x03\x02\x02\x02\u{ff}\ + \u{100}\x09\x1f\x02\x02\u{100}\x40\x03\x02\x02\x02\u{101}\u{102}\x05\x0d\ + \x07\x02\u{102}\u{103}\x05\x31\x19\x02\u{103}\u{104}\x05\x31\x19\x02\u{104}\ + \u{105}\x05\x15\x0b\x02\u{105}\u{106}\x05\x2f\x18\x02\u{106}\u{107}\x05\ + \x33\x1a\x02\u{107}\x42\x03\x02\x02\x02\u{108}\u{109}\x05\x25\x13\x02\u{109}\ + \u{10a}\x05\x0d\x07\x02\u{10a}\u{10b}\x05\x33\x1a\x02\u{10b}\u{10c}\x05\ + \x11\x09\x02\u{10c}\u{10d}\x05\x1b\x0e\x02\u{10d}\u{10e}\x05\x15\x0b\x02\ + \u{10e}\u{10f}\x05\x31\x19\x02\u{10f}\x44\x03\x02\x02\x02\u{110}\u{111}\ + \x05\x1d\x0f\x02\u{111}\u{112}\x05\x17\x0c\x02\u{112}\x46\x03\x02\x02\x02\ + \u{113}\u{114}\x05\x33\x1a\x02\u{114}\u{115}\x05\x1b\x0e\x02\u{115}\u{116}\ + \x05\x15\x0b\x02\u{116}\u{117}\x05\x27\x14\x02\u{117}\x48\x03\x02\x02\x02\ + \u{118}\u{119}\x05\x15\x0b\x02\u{119}\u{11a}\x05\x23\x12\x02\u{11a}\u{11b}\ + \x05\x31\x19\x02\u{11b}\u{11c}\x05\x15\x0b\x02\u{11c}\x4a\x03\x02\x02\x02\ + \u{11d}\u{11e}\x05\x27\x14\x02\u{11e}\u{11f}\x05\x35\x1b\x02\u{11f}\u{120}\ + \x05\x23\x12\x02\u{120}\u{121}\x05\x23\x12\x02\u{121}\x4c\x03\x02\x02\x02\ + \u{122}\u{123}\x05\x33\x1a\x02\u{123}\u{124}\x05\x2f\x18\x02\u{124}\u{125}\ + \x05\x35\x1b\x02\u{125}\u{126}\x05\x15\x0b\x02\u{126}\x4e\x03\x02\x02\x02\ + \u{127}\u{128}\x05\x17\x0c\x02\u{128}\u{129}\x05\x0d\x07\x02\u{129}\u{12a}\ + \x05\x23\x12\x02\u{12a}\u{12b}\x05\x31\x19\x02\u{12b}\u{12c}\x05\x15\x0b\ + \x02\u{12c}\x50\x03\x02\x02\x02\u{12d}\u{12e}\x05\x25\x13\x02\u{12e}\u{12f}\ + \x05\x15\x0b\x02\u{12f}\u{130}\x05\x33\x1a\x02\u{130}\u{131}\x05\x0d\x07\ + \x02\u{131}\u{132}\x05\x0f\x08\x02\u{132}\u{133}\x05\x29\x15\x02\u{133}\ + \u{134}\x05\x29\x15\x02\u{134}\u{135}\x05\x23\x12\x02\u{135}\x52\x03\x02\ + \x02\x02\u{136}\u{137}\x05\x25\x13\x02\u{137}\u{138}\x05\x15\x0b\x02\u{138}\ + 
\u{139}\x05\x33\x1a\x02\u{139}\u{13a}\x05\x0d\x07\x02\u{13a}\u{13b}\x05\ + \x1d\x0f\x02\u{13b}\u{13c}\x05\x27\x14\x02\u{13c}\u{13d}\x05\x33\x1a\x02\ + \u{13d}\x54\x03\x02\x02\x02\u{13e}\u{13f}\x05\x25\x13\x02\u{13f}\u{140}\ + \x05\x15\x0b\x02\u{140}\u{141}\x05\x33\x1a\x02\u{141}\u{142}\x05\x0d\x07\ + \x02\u{142}\u{143}\x05\x15\x0b\x02\u{143}\u{144}\x05\x27\x14\x02\u{144}\ + \u{145}\x05\x35\x1b\x02\u{145}\u{146}\x05\x25\x13\x02\u{146}\x56\x03\x02\ + \x02\x02\u{147}\u{148}\x05\x25\x13\x02\u{148}\u{149}\x05\x15\x0b\x02\u{149}\ + \u{14a}\x05\x33\x1a\x02\u{14a}\u{14b}\x05\x0d\x07\x02\u{14b}\u{14c}\x05\ + \x31\x19\x02\u{14c}\u{14d}\x05\x33\x1a\x02\u{14d}\u{14e}\x05\x2f\x18\x02\ + \u{14e}\x58\x03\x02\x02\x02\u{14f}\u{150}\x05\x33\x1a\x02\u{150}\u{151}\ + \x05\x3d\x1f\x02\u{151}\u{152}\x05\x2b\x16\x02\u{152}\u{153}\x05\x15\x0b\ + \x02\u{153}\u{154}\x05\x27\x14\x02\u{154}\u{155}\x05\x0d\x07\x02\u{155}\ + \u{156}\x05\x25\x13\x02\u{156}\u{157}\x05\x15\x0b\x02\u{157}\x5a\x03\x02\ + \x02\x02\u{158}\u{159}\x07\x30\x02\x02\u{159}\x5c\x03\x02\x02\x02\u{15a}\ + \u{15b}\x07\x2e\x02\x02\u{15b}\x5e\x03\x02\x02\x02\u{15c}\u{15d}\x07\x3c\ + \x02\x02\u{15d}\x60\x03\x02\x02\x02\u{15e}\u{15f}\x07\x3d\x02\x02\u{15f}\ + \x62\x03\x02\x02\x02\u{160}\u{161}\x07\x41\x02\x02\u{161}\x64\x03\x02\x02\ + \x02\u{162}\u{163}\x07\x23\x02\x02\u{163}\x66\x03\x02\x02\x02\u{164}\u{165}\ + \x07\x2a\x02\x02\u{165}\x68\x03\x02\x02\x02\u{166}\u{167}\x07\x2b\x02\x02\ + \u{167}\x6a\x03\x02\x02\x02\u{168}\u{169}\x07\x7d\x02\x02\u{169}\x6c\x03\ + \x02\x02\x02\u{16a}\u{16b}\x07\x7f\x02\x02\u{16b}\x6e\x03\x02\x02\x02\u{16c}\ + \u{16d}\x07\x5d\x02\x02\u{16d}\x70\x03\x02\x02\x02\u{16e}\u{16f}\x07\x5f\ + \x02\x02\u{16f}\x72\x03\x02\x02\x02\u{170}\u{171}\x07\x3f\x02\x02\u{171}\ + \x74\x03\x02\x02\x02\u{172}\u{173}\x07\x7e\x02\x02\u{173}\u{174}\x07\x7e\ + \x02\x02\u{174}\x76\x03\x02\x02\x02\u{175}\u{176}\x07\x28\x02\x02\u{176}\ + \u{177}\x07\x28\x02\x02\u{177}\x78\x03\x02\x02\x02\u{178}\u{179}\x07\x3f\ + \x02\x02\u{179}\u{17a}\x07\x3f\x02\x02\u{17a}\x7a\x03\x02\x02\x02\u{17b}\ + \u{17c}\x07\x23\x02\x02\u{17c}\u{17d}\x07\x3f\x02\x02\u{17d}\x7c\x03\x02\ + \x02\x02\u{17e}\u{17f}\x07\x3e\x02\x02\u{17f}\x7e\x03\x02\x02\x02\u{180}\ + \u{181}\x07\x3e\x02\x02\u{181}\u{182}\x07\x3f\x02\x02\u{182}\u{80}\x03\ + \x02\x02\x02\u{183}\u{184}\x07\x40\x02\x02\u{184}\u{82}\x03\x02\x02\x02\ + \u{185}\u{186}\x07\x40\x02\x02\u{186}\u{187}\x07\x3f\x02\x02\u{187}\u{84}\ + \x03\x02\x02\x02\u{188}\u{189}\x07\x2d\x02\x02\u{189}\u{86}\x03\x02\x02\ + \x02\u{18a}\u{18b}\x07\x2f\x02\x02\u{18b}\u{88}\x03\x02\x02\x02\u{18c}\ + \u{18d}\x07\x2c\x02\x02\u{18d}\u{8a}\x03\x02\x02\x02\u{18e}\u{18f}\x07\ + \x31\x02\x02\u{18f}\u{8c}\x03\x02\x02\x02\u{190}\u{191}\x07\x30\x02\x02\ + \u{191}\u{192}\x07\x30\x02\x02\u{192}\u{8e}\x03\x02\x02\x02\u{193}\u{197}\ + \x09\x20\x02\x02\u{194}\u{196}\x09\x21\x02\x02\u{195}\u{194}\x03\x02\x02\ + \x02\u{196}\u{199}\x03\x02\x02\x02\u{197}\u{195}\x03\x02\x02\x02\u{197}\ + \u{198}\x03\x02\x02\x02\u{198}\u{90}\x03\x02\x02\x02\u{199}\u{197}\x03\ + \x02\x02\x02\u{19a}\u{19b}\x07\x32\x02\x02\u{19b}\u{92}\x03\x02\x02\x02\ + \u{19c}\u{19d}\x07\x24\x02\x02\u{19d}\u{19e}\x0a\x22\x02\x02\u{19e}\u{19f}\ + \x07\x24\x02\x02\u{19f}\u{94}\x03\x02\x02\x02\u{1a0}\u{1a4}\x09\x23\x02\ + \x02\u{1a1}\u{1a3}\x09\x24\x02\x02\u{1a2}\u{1a1}\x03\x02\x02\x02\u{1a3}\ + \u{1a6}\x03\x02\x02\x02\u{1a4}\u{1a2}\x03\x02\x02\x02\u{1a4}\u{1a5}\x03\ + \x02\x02\x02\u{1a5}\u{96}\x03\x02\x02\x02\u{1a6}\u{1a4}\x03\x02\x02\x02\ + 
\x0c\x02\u{9d}\u{a9}\u{ac}\u{b1}\u{bc}\u{c3}\u{c9}\u{197}\u{1a4}\x03\x02\ + \x03\x02"; diff --git a/rs/src/parse/extensions/simple/derivations/substraittypelistener.rs b/rs/src/parse/extensions/simple/derivations/substraittypelistener.rs new file mode 100644 index 00000000..add79436 --- /dev/null +++ b/rs/src/parse/extensions/simple/derivations/substraittypelistener.rs @@ -0,0 +1,727 @@ +// SPDX-License-Identifier: Apache-2.0 +#![allow(clippy::all)] +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(nonstandard_style)] +// Generated from SubstraitType.g4 by ANTLR 4.8 +use antlr_rust::tree::ParseTreeListener; +use super::substraittypeparser::*; + +pub trait SubstraitTypeListener<'input> : ParseTreeListener<'input,SubstraitTypeParserContextType>{ +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#startPattern}. + * @param ctx the parse tree + */ +fn enter_startPattern(&mut self, _ctx: &StartPatternContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#startPattern}. + * @param ctx the parse tree + */ +fn exit_startPattern(&mut self, _ctx: &StartPatternContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#startProgram}. + * @param ctx the parse tree + */ +fn enter_startProgram(&mut self, _ctx: &StartProgramContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#startProgram}. + * @param ctx the parse tree + */ +fn exit_startProgram(&mut self, _ctx: &StartProgramContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#program}. + * @param ctx the parse tree + */ +fn enter_program(&mut self, _ctx: &ProgramContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#program}. + * @param ctx the parse tree + */ +fn exit_program(&mut self, _ctx: &ProgramContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#statementSeparator}. + * @param ctx the parse tree + */ +fn enter_statementSeparator(&mut self, _ctx: &StatementSeparatorContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#statementSeparator}. + * @param ctx the parse tree + */ +fn exit_statementSeparator(&mut self, _ctx: &StatementSeparatorContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Normal} + * labeled alternative in {@link SubstraitTypeParser#statement}. + * @param ctx the parse tree + */ +fn enter_Normal(&mut self, _ctx: &NormalContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Normal} + * labeled alternative in {@link SubstraitTypeParser#statement}. + * @param ctx the parse tree + */ +fn exit_Normal(&mut self, _ctx: &NormalContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Match} + * labeled alternative in {@link SubstraitTypeParser#statement}. + * @param ctx the parse tree + */ +fn enter_Match(&mut self, _ctx: &MatchContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Match} + * labeled alternative in {@link SubstraitTypeParser#statement}. + * @param ctx the parse tree + */ +fn exit_Match(&mut self, _ctx: &MatchContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Assert} + * labeled alternative in {@link SubstraitTypeParser#statement}. + * @param ctx the parse tree + */ +fn enter_Assert(&mut self, _ctx: &AssertContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Assert} + * labeled alternative in {@link SubstraitTypeParser#statement}. 
+ * @param ctx the parse tree + */ +fn exit_Assert(&mut self, _ctx: &AssertContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#pattern}. + * @param ctx the parse tree + */ +fn enter_pattern(&mut self, _ctx: &PatternContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#pattern}. + * @param ctx the parse tree + */ +fn exit_pattern(&mut self, _ctx: &PatternContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#patternOr}. + * @param ctx the parse tree + */ +fn enter_patternOr(&mut self, _ctx: &PatternOrContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#patternOr}. + * @param ctx the parse tree + */ +fn exit_patternOr(&mut self, _ctx: &PatternOrContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Or} + * labeled alternative in {@link SubstraitTypeParser#operatorOr}. + * @param ctx the parse tree + */ +fn enter_Or(&mut self, _ctx: &OrContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Or} + * labeled alternative in {@link SubstraitTypeParser#operatorOr}. + * @param ctx the parse tree + */ +fn exit_Or(&mut self, _ctx: &OrContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#patternAnd}. + * @param ctx the parse tree + */ +fn enter_patternAnd(&mut self, _ctx: &PatternAndContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#patternAnd}. + * @param ctx the parse tree + */ +fn exit_patternAnd(&mut self, _ctx: &PatternAndContext<'input>) { } +/** + * Enter a parse tree produced by the {@code And} + * labeled alternative in {@link SubstraitTypeParser#operatorAnd}. + * @param ctx the parse tree + */ +fn enter_And(&mut self, _ctx: &AndContext<'input>) { } +/** + * Exit a parse tree produced by the {@code And} + * labeled alternative in {@link SubstraitTypeParser#operatorAnd}. + * @param ctx the parse tree + */ +fn exit_And(&mut self, _ctx: &AndContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#patternEqNeq}. + * @param ctx the parse tree + */ +fn enter_patternEqNeq(&mut self, _ctx: &PatternEqNeqContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#patternEqNeq}. + * @param ctx the parse tree + */ +fn exit_patternEqNeq(&mut self, _ctx: &PatternEqNeqContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Eq} + * labeled alternative in {@link SubstraitTypeParser#operatorEqNeq}. + * @param ctx the parse tree + */ +fn enter_Eq(&mut self, _ctx: &EqContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Eq} + * labeled alternative in {@link SubstraitTypeParser#operatorEqNeq}. + * @param ctx the parse tree + */ +fn exit_Eq(&mut self, _ctx: &EqContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Neq} + * labeled alternative in {@link SubstraitTypeParser#operatorEqNeq}. + * @param ctx the parse tree + */ +fn enter_Neq(&mut self, _ctx: &NeqContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Neq} + * labeled alternative in {@link SubstraitTypeParser#operatorEqNeq}. + * @param ctx the parse tree + */ +fn exit_Neq(&mut self, _ctx: &NeqContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#patternIneq}. + * @param ctx the parse tree + */ +fn enter_patternIneq(&mut self, _ctx: &PatternIneqContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#patternIneq}. 
+ * @param ctx the parse tree + */ +fn exit_patternIneq(&mut self, _ctx: &PatternIneqContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Lt} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn enter_Lt(&mut self, _ctx: &LtContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Lt} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn exit_Lt(&mut self, _ctx: &LtContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Le} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn enter_Le(&mut self, _ctx: &LeContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Le} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn exit_Le(&mut self, _ctx: &LeContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Gt} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn enter_Gt(&mut self, _ctx: &GtContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Gt} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn exit_Gt(&mut self, _ctx: &GtContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Ge} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn enter_Ge(&mut self, _ctx: &GeContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Ge} + * labeled alternative in {@link SubstraitTypeParser#operatorIneq}. + * @param ctx the parse tree + */ +fn exit_Ge(&mut self, _ctx: &GeContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#patternAddSub}. + * @param ctx the parse tree + */ +fn enter_patternAddSub(&mut self, _ctx: &PatternAddSubContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#patternAddSub}. + * @param ctx the parse tree + */ +fn exit_patternAddSub(&mut self, _ctx: &PatternAddSubContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Add} + * labeled alternative in {@link SubstraitTypeParser#operatorAddSub}. + * @param ctx the parse tree + */ +fn enter_Add(&mut self, _ctx: &AddContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Add} + * labeled alternative in {@link SubstraitTypeParser#operatorAddSub}. + * @param ctx the parse tree + */ +fn exit_Add(&mut self, _ctx: &AddContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Sub} + * labeled alternative in {@link SubstraitTypeParser#operatorAddSub}. + * @param ctx the parse tree + */ +fn enter_Sub(&mut self, _ctx: &SubContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Sub} + * labeled alternative in {@link SubstraitTypeParser#operatorAddSub}. + * @param ctx the parse tree + */ +fn exit_Sub(&mut self, _ctx: &SubContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#patternMulDiv}. + * @param ctx the parse tree + */ +fn enter_patternMulDiv(&mut self, _ctx: &PatternMulDivContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#patternMulDiv}. 
+ * @param ctx the parse tree + */ +fn exit_patternMulDiv(&mut self, _ctx: &PatternMulDivContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Mul} + * labeled alternative in {@link SubstraitTypeParser#operatorMulDiv}. + * @param ctx the parse tree + */ +fn enter_Mul(&mut self, _ctx: &MulContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Mul} + * labeled alternative in {@link SubstraitTypeParser#operatorMulDiv}. + * @param ctx the parse tree + */ +fn exit_Mul(&mut self, _ctx: &MulContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Div} + * labeled alternative in {@link SubstraitTypeParser#operatorMulDiv}. + * @param ctx the parse tree + */ +fn enter_Div(&mut self, _ctx: &DivContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Div} + * labeled alternative in {@link SubstraitTypeParser#operatorMulDiv}. + * @param ctx the parse tree + */ +fn exit_Div(&mut self, _ctx: &DivContext<'input>) { } +/** + * Enter a parse tree produced by the {@code parentheses} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_parentheses(&mut self, _ctx: &ParenthesesContext<'input>) { } +/** + * Exit a parse tree produced by the {@code parentheses} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_parentheses(&mut self, _ctx: &ParenthesesContext<'input>) { } +/** + * Enter a parse tree produced by the {@code ifThenElse} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_ifThenElse(&mut self, _ctx: &IfThenElseContext<'input>) { } +/** + * Exit a parse tree produced by the {@code ifThenElse} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_ifThenElse(&mut self, _ctx: &IfThenElseContext<'input>) { } +/** + * Enter a parse tree produced by the {@code unaryNot} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_unaryNot(&mut self, _ctx: &UnaryNotContext<'input>) { } +/** + * Exit a parse tree produced by the {@code unaryNot} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_unaryNot(&mut self, _ctx: &UnaryNotContext<'input>) { } +/** + * Enter a parse tree produced by the {@code any} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_any(&mut self, _ctx: &AnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code any} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_any(&mut self, _ctx: &AnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code boolAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_boolAny(&mut self, _ctx: &BoolAnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code boolAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_boolAny(&mut self, _ctx: &BoolAnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code boolTrue} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. 
+ * @param ctx the parse tree + */ +fn enter_boolTrue(&mut self, _ctx: &BoolTrueContext<'input>) { } +/** + * Exit a parse tree produced by the {@code boolTrue} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_boolTrue(&mut self, _ctx: &BoolTrueContext<'input>) { } +/** + * Enter a parse tree produced by the {@code boolFalse} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_boolFalse(&mut self, _ctx: &BoolFalseContext<'input>) { } +/** + * Exit a parse tree produced by the {@code boolFalse} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_boolFalse(&mut self, _ctx: &BoolFalseContext<'input>) { } +/** + * Enter a parse tree produced by the {@code intAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_intAny(&mut self, _ctx: &IntAnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code intAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_intAny(&mut self, _ctx: &IntAnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code intRange} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_intRange(&mut self, _ctx: &IntRangeContext<'input>) { } +/** + * Exit a parse tree produced by the {@code intRange} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_intRange(&mut self, _ctx: &IntRangeContext<'input>) { } +/** + * Enter a parse tree produced by the {@code intAtLeast} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_intAtLeast(&mut self, _ctx: &IntAtLeastContext<'input>) { } +/** + * Exit a parse tree produced by the {@code intAtLeast} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_intAtLeast(&mut self, _ctx: &IntAtLeastContext<'input>) { } +/** + * Enter a parse tree produced by the {@code intAtMost} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_intAtMost(&mut self, _ctx: &IntAtMostContext<'input>) { } +/** + * Exit a parse tree produced by the {@code intAtMost} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_intAtMost(&mut self, _ctx: &IntAtMostContext<'input>) { } +/** + * Enter a parse tree produced by the {@code intExactly} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_intExactly(&mut self, _ctx: &IntExactlyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code intExactly} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_intExactly(&mut self, _ctx: &IntExactlyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code enumAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_enumAny(&mut self, _ctx: &EnumAnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code enumAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. 
+ * @param ctx the parse tree + */ +fn exit_enumAny(&mut self, _ctx: &EnumAnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code enumSet} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_enumSet(&mut self, _ctx: &EnumSetContext<'input>) { } +/** + * Exit a parse tree produced by the {@code enumSet} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_enumSet(&mut self, _ctx: &EnumSetContext<'input>) { } +/** + * Enter a parse tree produced by the {@code strAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_strAny(&mut self, _ctx: &StrAnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code strAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_strAny(&mut self, _ctx: &StrAnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code strExactly} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_strExactly(&mut self, _ctx: &StrExactlyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code strExactly} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_strExactly(&mut self, _ctx: &StrExactlyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code dtAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_dtAny(&mut self, _ctx: &DtAnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code dtAny} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_dtAny(&mut self, _ctx: &DtAnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code function} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_function(&mut self, _ctx: &FunctionContext<'input>) { } +/** + * Exit a parse tree produced by the {@code function} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_function(&mut self, _ctx: &FunctionContext<'input>) { } +/** + * Enter a parse tree produced by the {@code datatypeBindingOrConstant} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_datatypeBindingOrConstant(&mut self, _ctx: &DatatypeBindingOrConstantContext<'input>) { } +/** + * Exit a parse tree produced by the {@code datatypeBindingOrConstant} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_datatypeBindingOrConstant(&mut self, _ctx: &DatatypeBindingOrConstantContext<'input>) { } +/** + * Enter a parse tree produced by the {@code inconsistent} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn enter_inconsistent(&mut self, _ctx: &InconsistentContext<'input>) { } +/** + * Exit a parse tree produced by the {@code inconsistent} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_inconsistent(&mut self, _ctx: &InconsistentContext<'input>) { } +/** + * Enter a parse tree produced by the {@code unaryNegate} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. 
+ * @param ctx the parse tree + */ +fn enter_unaryNegate(&mut self, _ctx: &UnaryNegateContext<'input>) { } +/** + * Exit a parse tree produced by the {@code unaryNegate} + * labeled alternative in {@link SubstraitTypeParser#patternMisc}. + * @param ctx the parse tree + */ +fn exit_unaryNegate(&mut self, _ctx: &UnaryNegateContext<'input>) { } +/** + * Enter a parse tree produced by the {@code nonNullable} + * labeled alternative in {@link SubstraitTypeParser#nullability}. + * @param ctx the parse tree + */ +fn enter_nonNullable(&mut self, _ctx: &NonNullableContext<'input>) { } +/** + * Exit a parse tree produced by the {@code nonNullable} + * labeled alternative in {@link SubstraitTypeParser#nullability}. + * @param ctx the parse tree + */ +fn exit_nonNullable(&mut self, _ctx: &NonNullableContext<'input>) { } +/** + * Enter a parse tree produced by the {@code nullable} + * labeled alternative in {@link SubstraitTypeParser#nullability}. + * @param ctx the parse tree + */ +fn enter_nullable(&mut self, _ctx: &NullableContext<'input>) { } +/** + * Exit a parse tree produced by the {@code nullable} + * labeled alternative in {@link SubstraitTypeParser#nullability}. + * @param ctx the parse tree + */ +fn exit_nullable(&mut self, _ctx: &NullableContext<'input>) { } +/** + * Enter a parse tree produced by the {@code nullableIf} + * labeled alternative in {@link SubstraitTypeParser#nullability}. + * @param ctx the parse tree + */ +fn enter_nullableIf(&mut self, _ctx: &NullableIfContext<'input>) { } +/** + * Exit a parse tree produced by the {@code nullableIf} + * labeled alternative in {@link SubstraitTypeParser#nullability}. + * @param ctx the parse tree + */ +fn exit_nullableIf(&mut self, _ctx: &NullableIfContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#variation}. + * @param ctx the parse tree + */ +fn enter_variation(&mut self, _ctx: &VariationContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#variation}. + * @param ctx the parse tree + */ +fn exit_variation(&mut self, _ctx: &VariationContext<'input>) { } +/** + * Enter a parse tree produced by the {@code varAny} + * labeled alternative in {@link SubstraitTypeParser#variationBody}. + * @param ctx the parse tree + */ +fn enter_varAny(&mut self, _ctx: &VarAnyContext<'input>) { } +/** + * Exit a parse tree produced by the {@code varAny} + * labeled alternative in {@link SubstraitTypeParser#variationBody}. + * @param ctx the parse tree + */ +fn exit_varAny(&mut self, _ctx: &VarAnyContext<'input>) { } +/** + * Enter a parse tree produced by the {@code varSystemPreferred} + * labeled alternative in {@link SubstraitTypeParser#variationBody}. + * @param ctx the parse tree + */ +fn enter_varSystemPreferred(&mut self, _ctx: &VarSystemPreferredContext<'input>) { } +/** + * Exit a parse tree produced by the {@code varSystemPreferred} + * labeled alternative in {@link SubstraitTypeParser#variationBody}. + * @param ctx the parse tree + */ +fn exit_varSystemPreferred(&mut self, _ctx: &VarSystemPreferredContext<'input>) { } +/** + * Enter a parse tree produced by the {@code varUserDefined} + * labeled alternative in {@link SubstraitTypeParser#variationBody}. + * @param ctx the parse tree + */ +fn enter_varUserDefined(&mut self, _ctx: &VarUserDefinedContext<'input>) { } +/** + * Exit a parse tree produced by the {@code varUserDefined} + * labeled alternative in {@link SubstraitTypeParser#variationBody}. 
+ * @param ctx the parse tree + */ +fn exit_varUserDefined(&mut self, _ctx: &VarUserDefinedContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#parameters}. + * @param ctx the parse tree + */ +fn enter_parameters(&mut self, _ctx: &ParametersContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#parameters}. + * @param ctx the parse tree + */ +fn exit_parameters(&mut self, _ctx: &ParametersContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#parameter}. + * @param ctx the parse tree + */ +fn enter_parameter(&mut self, _ctx: &ParameterContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#parameter}. + * @param ctx the parse tree + */ +fn exit_parameter(&mut self, _ctx: &ParameterContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Null} + * labeled alternative in {@link SubstraitTypeParser#parameterValue}. + * @param ctx the parse tree + */ +fn enter_Null(&mut self, _ctx: &NullContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Null} + * labeled alternative in {@link SubstraitTypeParser#parameterValue}. + * @param ctx the parse tree + */ +fn exit_Null(&mut self, _ctx: &NullContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Specified} + * labeled alternative in {@link SubstraitTypeParser#parameterValue}. + * @param ctx the parse tree + */ +fn enter_Specified(&mut self, _ctx: &SpecifiedContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Specified} + * labeled alternative in {@link SubstraitTypeParser#parameterValue}. + * @param ctx the parse tree + */ +fn exit_Specified(&mut self, _ctx: &SpecifiedContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#integer}. + * @param ctx the parse tree + */ +fn enter_integer(&mut self, _ctx: &IntegerContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#integer}. + * @param ctx the parse tree + */ +fn exit_integer(&mut self, _ctx: &IntegerContext<'input>) { } +/** + * Enter a parse tree produced by {@link SubstraitTypeParser#identifierPath}. + * @param ctx the parse tree + */ +fn enter_identifierPath(&mut self, _ctx: &IdentifierPathContext<'input>) { } +/** + * Exit a parse tree produced by {@link SubstraitTypeParser#identifierPath}. + * @param ctx the parse tree + */ +fn exit_identifierPath(&mut self, _ctx: &IdentifierPathContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Str} + * labeled alternative in {@link SubstraitTypeParser#identifierOrString}. + * @param ctx the parse tree + */ +fn enter_Str(&mut self, _ctx: &StrContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Str} + * labeled alternative in {@link SubstraitTypeParser#identifierOrString}. + * @param ctx the parse tree + */ +fn exit_Str(&mut self, _ctx: &StrContext<'input>) { } +/** + * Enter a parse tree produced by the {@code Ident} + * labeled alternative in {@link SubstraitTypeParser#identifierOrString}. + * @param ctx the parse tree + */ +fn enter_Ident(&mut self, _ctx: &IdentContext<'input>) { } +/** + * Exit a parse tree produced by the {@code Ident} + * labeled alternative in {@link SubstraitTypeParser#identifierOrString}. 
+ * @param ctx the parse tree + */ +fn exit_Ident(&mut self, _ctx: &IdentContext<'input>) { } + +} + +antlr_rust::coerce_from!{ 'input : SubstraitTypeListener<'input> } + + diff --git a/rs/src/parse/extensions/simple/derivations/substraittypeparser.rs b/rs/src/parse/extensions/simple/derivations/substraittypeparser.rs new file mode 100644 index 00000000..1e216a83 --- /dev/null +++ b/rs/src/parse/extensions/simple/derivations/substraittypeparser.rs @@ -0,0 +1,7301 @@ +// SPDX-License-Identifier: Apache-2.0 +#![allow(clippy::all)] +#![cfg_attr(rustfmt, rustfmt_skip)] +// Generated from SubstraitType.g4 by ANTLR 4.8 +#![allow(dead_code)] +#![allow(non_snake_case)] +#![allow(non_upper_case_globals)] +#![allow(nonstandard_style)] +#![allow(unused_imports)] +#![allow(unused_mut)] +#![allow(unused_braces)] +use antlr_rust::PredictionContextCache; +use antlr_rust::parser::{Parser, BaseParser, ParserRecog, ParserNodeType}; +use antlr_rust::token_stream::TokenStream; +use antlr_rust::TokenSource; +use antlr_rust::parser_atn_simulator::ParserATNSimulator; +use antlr_rust::errors::*; +use antlr_rust::rule_context::{BaseRuleContext, CustomRuleContext, RuleContext}; +use antlr_rust::recognizer::{Recognizer,Actions}; +use antlr_rust::atn_deserializer::ATNDeserializer; +use antlr_rust::dfa::DFA; +use antlr_rust::atn::{ATN, INVALID_ALT}; +use antlr_rust::error_strategy::{ErrorStrategy, DefaultErrorStrategy}; +use antlr_rust::parser_rule_context::{BaseParserRuleContext, ParserRuleContext,cast,cast_mut}; +use antlr_rust::tree::*; +use antlr_rust::token::{TOKEN_EOF,OwningToken,Token}; +use antlr_rust::int_stream::EOF; +use antlr_rust::vocabulary::{Vocabulary,VocabularyImpl}; +use antlr_rust::token_factory::{CommonTokenFactory,TokenFactory, TokenAware}; +use super::substraittypelistener::*; +use antlr_rust::lazy_static; +use antlr_rust::{TidAble,TidExt}; + +use std::marker::PhantomData; +use std::sync::Arc; +use std::rc::Rc; +use std::convert::TryFrom; +use std::cell::RefCell; +use std::ops::{DerefMut, Deref}; +use std::borrow::{Borrow,BorrowMut}; +use std::any::{Any,TypeId}; + + pub const LineComment:isize=1; + pub const BlockComment:isize=2; + pub const Whitespace:isize=3; + pub const Newline:isize=4; + pub const EscNewline:isize=5; + pub const Assert:isize=6; + pub const Matches:isize=7; + pub const If:isize=8; + pub const Then:isize=9; + pub const Else:isize=10; + pub const Null:isize=11; + pub const True:isize=12; + pub const False:isize=13; + pub const Metabool:isize=14; + pub const Metaint:isize=15; + pub const Metaenum:isize=16; + pub const Metastr:isize=17; + pub const Typename:isize=18; + pub const Period:isize=19; + pub const Comma:isize=20; + pub const Colon:isize=21; + pub const Semicolon:isize=22; + pub const Question:isize=23; + pub const Bang:isize=24; + pub const OpenParen:isize=25; + pub const CloseParen:isize=26; + pub const OpenCurly:isize=27; + pub const CloseCurly:isize=28; + pub const OpenSquare:isize=29; + pub const CloseSquare:isize=30; + pub const Assign:isize=31; + pub const BooleanOr:isize=32; + pub const BooleanAnd:isize=33; + pub const Equal:isize=34; + pub const NotEqual:isize=35; + pub const LessThan:isize=36; + pub const LessEqual:isize=37; + pub const GreaterThan:isize=38; + pub const GreaterEqual:isize=39; + pub const Plus:isize=40; + pub const Minus:isize=41; + pub const Multiply:isize=42; + pub const Divide:isize=43; + pub const Range:isize=44; + pub const Nonzero:isize=45; + pub const Zero:isize=46; + pub const String:isize=47; + pub const Identifier:isize=48; + pub const 
RULE_startPattern:usize = 0; + pub const RULE_startProgram:usize = 1; + pub const RULE_program:usize = 2; + pub const RULE_statementSeparator:usize = 3; + pub const RULE_statement:usize = 4; + pub const RULE_pattern:usize = 5; + pub const RULE_patternOr:usize = 6; + pub const RULE_operatorOr:usize = 7; + pub const RULE_patternAnd:usize = 8; + pub const RULE_operatorAnd:usize = 9; + pub const RULE_patternEqNeq:usize = 10; + pub const RULE_operatorEqNeq:usize = 11; + pub const RULE_patternIneq:usize = 12; + pub const RULE_operatorIneq:usize = 13; + pub const RULE_patternAddSub:usize = 14; + pub const RULE_operatorAddSub:usize = 15; + pub const RULE_patternMulDiv:usize = 16; + pub const RULE_operatorMulDiv:usize = 17; + pub const RULE_patternMisc:usize = 18; + pub const RULE_nullability:usize = 19; + pub const RULE_variation:usize = 20; + pub const RULE_variationBody:usize = 21; + pub const RULE_parameters:usize = 22; + pub const RULE_parameter:usize = 23; + pub const RULE_parameterValue:usize = 24; + pub const RULE_integer:usize = 25; + pub const RULE_identifierPath:usize = 26; + pub const RULE_identifierOrString:usize = 27; + pub const ruleNames: [&'static str; 28] = [ + "startPattern", "startProgram", "program", "statementSeparator", "statement", + "pattern", "patternOr", "operatorOr", "patternAnd", "operatorAnd", "patternEqNeq", + "operatorEqNeq", "patternIneq", "operatorIneq", "patternAddSub", "operatorAddSub", + "patternMulDiv", "operatorMulDiv", "patternMisc", "nullability", "variation", + "variationBody", "parameters", "parameter", "parameterValue", "integer", + "identifierPath", "identifierOrString" + ]; + + + pub const _LITERAL_NAMES: [Option<&'static str>;47] = [ + None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, Some("'.'"), Some("','"), Some("':'"), + Some("';'"), Some("'?'"), Some("'!'"), Some("'('"), Some("')'"), Some("'{'"), + Some("'}'"), Some("'['"), Some("']'"), Some("'='"), Some("'||'"), Some("'&&'"), + Some("'=='"), Some("'!='"), Some("'<'"), Some("'<='"), Some("'>'"), Some("'>='"), + Some("'+'"), Some("'-'"), Some("'*'"), Some("'/'"), Some("'..'"), None, + Some("'0'") + ]; + pub const _SYMBOLIC_NAMES: [Option<&'static str>;49] = [ + None, Some("LineComment"), Some("BlockComment"), Some("Whitespace"), Some("Newline"), + Some("EscNewline"), Some("Assert"), Some("Matches"), Some("If"), Some("Then"), + Some("Else"), Some("Null"), Some("True"), Some("False"), Some("Metabool"), + Some("Metaint"), Some("Metaenum"), Some("Metastr"), Some("Typename"), + Some("Period"), Some("Comma"), Some("Colon"), Some("Semicolon"), Some("Question"), + Some("Bang"), Some("OpenParen"), Some("CloseParen"), Some("OpenCurly"), + Some("CloseCurly"), Some("OpenSquare"), Some("CloseSquare"), Some("Assign"), + Some("BooleanOr"), Some("BooleanAnd"), Some("Equal"), Some("NotEqual"), + Some("LessThan"), Some("LessEqual"), Some("GreaterThan"), Some("GreaterEqual"), + Some("Plus"), Some("Minus"), Some("Multiply"), Some("Divide"), Some("Range"), + Some("Nonzero"), Some("Zero"), Some("String"), Some("Identifier") + ]; + lazy_static!{ + static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); + static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); + } + + +type BaseParserType<'input, I> = + BaseParser<'input,SubstraitTypeParserExt<'input>, I, SubstraitTypeParserContextType , dyn SubstraitTypeListener<'input> + 'input >; + +type TokenType<'input> = as 
TokenFactory<'input>>::Tok; +pub type LocalTokenFactory<'input> = CommonTokenFactory; + +pub type SubstraitTypeTreeWalker<'input,'a> = + ParseTreeWalker<'input, 'a, SubstraitTypeParserContextType , dyn SubstraitTypeListener<'input> + 'a>; + +/// Parser for SubstraitType grammar +pub struct SubstraitTypeParser<'input,I,H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + base:BaseParserType<'input,I>, + interpreter:Arc<ParserATNSimulator>, + _shared_context_cache: Box<PredictionContextCache>, + pub err_handler: H, +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn get_serialized_atn() -> &'static str { _serializedATN } + + pub fn set_error_strategy(&mut self, strategy: H) { + self.err_handler = strategy + } + + pub fn with_strategy(input: I, strategy: H) -> Self { + antlr_rust::recognizer::check_version("0","3"); + let interpreter = Arc::new(ParserATNSimulator::new( + _ATN.clone(), + _decision_to_DFA.clone(), + _shared_context_cache.clone(), + )); + Self { + base: BaseParser::new_base_parser( + input, + Arc::clone(&interpreter), + SubstraitTypeParserExt{ + _pd: Default::default(), + } + ), + interpreter, + _shared_context_cache: Box::new(PredictionContextCache::new()), + err_handler: strategy, + } + } + +} + +type DynStrategy<'input,I> = Box<dyn ErrorStrategy<'input,BaseParserType<'input,I>> + 'input>; + +impl<'input, I> SubstraitTypeParser<'input, I, DynStrategy<'input,I>> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, +{ + pub fn with_dyn_strategy(input: I) -> Self{ + Self::with_strategy(input,Box::new(DefaultErrorStrategy::new())) + } +} + +impl<'input, I> SubstraitTypeParser<'input, I, DefaultErrorStrategy<'input,SubstraitTypeParserContextType>> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, +{ + pub fn new(input: I) -> Self{ + Self::with_strategy(input,DefaultErrorStrategy::new()) + } +} + +/// Trait for monomorphized trait object that corresponds to the nodes of parse tree generated for SubstraitTypeParser +pub trait SubstraitTypeParserContext<'input>: + for<'x> Listenable<dyn SubstraitTypeListener<'input> + 'x > + + ParserRuleContext<'input, TF=LocalTokenFactory<'input>, Ctx=SubstraitTypeParserContextType> +{} + +antlr_rust::coerce_from!{ 'input : SubstraitTypeParserContext<'input> } + +impl<'input> SubstraitTypeParserContext<'input> for TerminalNode<'input,SubstraitTypeParserContextType> {} +impl<'input> SubstraitTypeParserContext<'input> for ErrorNode<'input,SubstraitTypeParserContextType> {} + +antlr_rust::tid! { impl<'input> TidAble<'input> for dyn SubstraitTypeParserContext<'input> + 'input } + +antlr_rust::tid!
{ impl<'input> TidAble<'input> for dyn SubstraitTypeListener<'input> + 'input } + +pub struct SubstraitTypeParserContextType; +antlr_rust::tid!{SubstraitTypeParserContextType} + +impl<'input> ParserNodeType<'input> for SubstraitTypeParserContextType{ + type TF = LocalTokenFactory<'input>; + type Type = dyn SubstraitTypeParserContext<'input> + 'input; +} + +impl<'input, I, H> Deref for SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + type Target = BaseParserType<'input,I>; + + fn deref(&self) -> &Self::Target { + &self.base + } +} + +impl<'input, I, H> DerefMut for SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.base + } +} + +pub struct SubstraitTypeParserExt<'input>{ + _pd: PhantomData<&'input str>, +} + +impl<'input> SubstraitTypeParserExt<'input>{ +} +antlr_rust::tid! { SubstraitTypeParserExt<'a> } + +impl<'input> TokenAware<'input> for SubstraitTypeParserExt<'input>{ + type TF = LocalTokenFactory<'input>; +} + +impl<'input,I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>> ParserRecog<'input, BaseParserType<'input,I>> for SubstraitTypeParserExt<'input>{} + +impl<'input,I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>> Actions<'input, BaseParserType<'input,I>> for SubstraitTypeParserExt<'input>{ + fn get_grammar_file_name(&self) -> & str{ "SubstraitType.g4"} + + fn get_rule_names(&self) -> &[& str] {&ruleNames} + + fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } +} +//------------------- startPattern ---------------- +pub type StartPatternContextAll<'input> = StartPatternContext<'input>; + + +pub type StartPatternContext<'input> = BaseParserRuleContext<'input,StartPatternContextExt<'input>>; + +#[derive(Clone)] +pub struct StartPatternContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for StartPatternContext<'input>{} + +impl<'input,'a> Listenable + 'a> for StartPatternContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_startPattern(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_startPattern(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for StartPatternContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_startPattern } + //fn type_rule_index() -> usize where Self: Sized { RULE_startPattern } +} +antlr_rust::tid!{StartPatternContextExt<'a>} + +impl<'input> StartPatternContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,StartPatternContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait StartPatternContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} +/// Retrieves first TerminalNode corresponding to token EOF +/// Returns `None` if there is no child corresponding to token EOF +fn EOF(&self) -> Option>> where Self:Sized{ + self.get_token(EOF, 0) +} +/// Retrieves all `TerminalNode`s 
corresponding to token Whitespace in current rule +fn Whitespace_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Whitespace, starting from 0. +/// Returns `None` if number of children corresponding to token Whitespace is less or equal than `i`. +fn Whitespace(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Whitespace, i) +} +/// Retrieves all `TerminalNode`s corresponding to token Newline in current rule +fn Newline_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Newline, starting from 0. +/// Returns `None` if number of children corresponding to token Newline is less or equal than `i`. +fn Newline(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Newline, i) +} + +} + +impl<'input> StartPatternContextAttrs<'input> for StartPatternContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn startPattern(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = StartPatternContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 0, RULE_startPattern); + let mut _localctx: Rc = _localctx; + let mut _la: isize = -1; + let result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(59); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Whitespace { + { + { + recog.base.set_state(56); + recog.base.match_token(Whitespace,&mut recog.err_handler)?; + + } + } + recog.base.set_state(61); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + recog.base.set_state(65); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Newline { + { + { + recog.base.set_state(62); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + recog.base.set_state(67); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + /*InvokeRule pattern*/ + recog.base.set_state(68); + recog.pattern()?; + + recog.base.set_state(72); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Newline { + { + { + recog.base.set_state(69); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + recog.base.set_state(74); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + recog.base.set_state(75); + recog.base.match_token(EOF,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- startProgram ---------------- +pub type StartProgramContextAll<'input> = StartProgramContext<'input>; + + +pub type StartProgramContext<'input> = BaseParserRuleContext<'input,StartProgramContextExt<'input>>; + +#[derive(Clone)] +pub struct StartProgramContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for StartProgramContext<'input>{} + 
+impl<'input,'a> Listenable + 'a> for StartProgramContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_startProgram(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_startProgram(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for StartProgramContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_startProgram } + //fn type_rule_index() -> usize where Self: Sized { RULE_startProgram } +} +antlr_rust::tid!{StartProgramContextExt<'a>} + +impl<'input> StartProgramContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,StartProgramContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait StartProgramContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn program(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} +/// Retrieves first TerminalNode corresponding to token EOF +/// Returns `None` if there is no child corresponding to token EOF +fn EOF(&self) -> Option>> where Self:Sized{ + self.get_token(EOF, 0) +} +/// Retrieves all `TerminalNode`s corresponding to token Whitespace in current rule +fn Whitespace_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Whitespace, starting from 0. +/// Returns `None` if number of children corresponding to token Whitespace is less or equal than `i`. +fn Whitespace(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Whitespace, i) +} +/// Retrieves all `TerminalNode`s corresponding to token Newline in current rule +fn Newline_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Newline, starting from 0. +/// Returns `None` if number of children corresponding to token Newline is less or equal than `i`. 
+fn Newline(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Newline, i) +} + +} + +impl<'input> StartProgramContextAttrs<'input> for StartProgramContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn startProgram(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = StartProgramContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 2, RULE_startProgram); + let mut _localctx: Rc = _localctx; + let mut _la: isize = -1; + let result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(80); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Whitespace { + { + { + recog.base.set_state(77); + recog.base.match_token(Whitespace,&mut recog.err_handler)?; + + } + } + recog.base.set_state(82); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + recog.base.set_state(86); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Newline { + { + { + recog.base.set_state(83); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + recog.base.set_state(88); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + /*InvokeRule program*/ + recog.base.set_state(89); + recog.program()?; + + recog.base.set_state(93); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Newline { + { + { + recog.base.set_state(90); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + recog.base.set_state(95); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + recog.base.set_state(96); + recog.base.match_token(EOF,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- program ---------------- +pub type ProgramContextAll<'input> = ProgramContext<'input>; + + +pub type ProgramContext<'input> = BaseParserRuleContext<'input,ProgramContextExt<'input>>; + +#[derive(Clone)] +pub struct ProgramContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for ProgramContext<'input>{} + +impl<'input,'a> Listenable + 'a> for ProgramContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_program(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_program(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for ProgramContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_program } + //fn type_rule_index() -> usize where Self: Sized { RULE_program } +} +antlr_rust::tid!{ProgramContextExt<'a>} + +impl<'input> ProgramContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + 
BaseParserRuleContext::new_parser_ctx(parent, invoking_state,ProgramContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait ProgramContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} +fn statement_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn statement(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn statementSeparator_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn statementSeparator(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> ProgramContextAttrs<'input> for ProgramContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn program(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = ProgramContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 4, RULE_program); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(103); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(6,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule statement*/ + recog.base.set_state(98); + recog.statement()?; + + /*InvokeRule statementSeparator*/ + recog.base.set_state(99); + recog.statementSeparator()?; + + } + } + } + recog.base.set_state(105); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(6,&mut recog.base)?; + } + /*InvokeRule pattern*/ + recog.base.set_state(106); + recog.pattern()?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- statementSeparator ---------------- +pub type StatementSeparatorContextAll<'input> = StatementSeparatorContext<'input>; + + +pub type StatementSeparatorContext<'input> = BaseParserRuleContext<'input,StatementSeparatorContextExt<'input>>; + +#[derive(Clone)] +pub struct StatementSeparatorContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for StatementSeparatorContext<'input>{} + +impl<'input,'a> Listenable + 'a> for StatementSeparatorContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_statementSeparator(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_statementSeparator(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for StatementSeparatorContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_statementSeparator } + //fn type_rule_index() -> usize where Self: Sized { RULE_statementSeparator } +} +antlr_rust::tid!{StatementSeparatorContextExt<'a>} + +impl<'input> 
StatementSeparatorContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,StatementSeparatorContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait StatementSeparatorContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +/// Retrieves all `TerminalNode`s corresponding to token Newline in current rule +fn Newline_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Newline, starting from 0. +/// Returns `None` if number of children corresponding to token Newline is less or equal than `i`. +fn Newline(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Newline, i) +} +/// Retrieves first TerminalNode corresponding to token Semicolon +/// Returns `None` if there is no child corresponding to token Semicolon +fn Semicolon(&self) -> Option>> where Self:Sized{ + self.get_token(Semicolon, 0) +} + +} + +impl<'input> StatementSeparatorContextAttrs<'input> for StatementSeparatorContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn statementSeparator(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = StatementSeparatorContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 6, RULE_statementSeparator); + let mut _localctx: Rc = _localctx; + let mut _la: isize = -1; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(111); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(7,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + recog.base.set_state(108); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + } + recog.base.set_state(113); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(7,&mut recog.base)?; + } + recog.base.set_state(122); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + Newline + => { + { + recog.base.set_state(114); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + + Semicolon + => { + { + recog.base.set_state(115); + recog.base.match_token(Semicolon,&mut recog.err_handler)?; + + recog.base.set_state(119); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Newline { + { + { + recog.base.set_state(116); + recog.base.match_token(Newline,&mut recog.err_handler)?; + + } + } + recog.base.set_state(121); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? 
+ } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- statement ---------------- +#[derive(Debug)] +pub enum StatementContextAll<'input>{ + AssertContext(AssertContext<'input>), + NormalContext(NormalContext<'input>), + MatchContext(MatchContext<'input>), +Error(StatementContext<'input>) +} +antlr_rust::tid!{StatementContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for StatementContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for StatementContextAll<'input>{} + +impl<'input> Deref for StatementContextAll<'input>{ + type Target = dyn StatementContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use StatementContextAll::*; + match self{ + AssertContext(inner) => inner, + NormalContext(inner) => inner, + MatchContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for StatementContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type StatementContext<'input> = BaseParserRuleContext<'input,StatementContextExt<'input>>; + +#[derive(Clone)] +pub struct StatementContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for StatementContext<'input>{} + +impl<'input,'a> Listenable + 'a> for StatementContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for StatementContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_statement } + //fn type_rule_index() -> usize where Self: Sized { RULE_statement } +} +antlr_rust::tid!{StatementContextExt<'a>} + +impl<'input> StatementContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + StatementContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,StatementContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait StatementContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> StatementContextAttrs<'input> for StatementContext<'input>{} + +pub type AssertContext<'input> = BaseParserRuleContext<'input,AssertContextExt<'input>>; + +pub trait AssertContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Assert + /// Returns `None` if there is no child corresponding to token Assert + fn Assert(&self) -> Option>> where Self:Sized{ + self.get_token(Assert, 0) + } + fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> AssertContextAttrs<'input> for AssertContext<'input>{} + +pub struct AssertContextExt<'input>{ + base:StatementContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{AssertContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for AssertContext<'input>{} + +impl<'input,'a> Listenable + 'a> for AssertContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Assert(self); + } +} + +impl<'input> 
CustomRuleContext<'input> for AssertContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_statement } + //fn type_rule_index() -> usize where Self: Sized { RULE_statement } +} + +impl<'input> Borrow> for AssertContext<'input>{ + fn borrow(&self) -> &StatementContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for AssertContext<'input>{ + fn borrow_mut(&mut self) -> &mut StatementContextExt<'input> { &mut self.base } +} + +impl<'input> StatementContextAttrs<'input> for AssertContext<'input> {} + +impl<'input> AssertContextExt<'input>{ + fn new(ctx: &dyn StatementContextAttrs<'input>) -> Rc> { + Rc::new( + StatementContextAll::AssertContext( + BaseParserRuleContext::copy_from(ctx,AssertContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type NormalContext<'input> = BaseParserRuleContext<'input,NormalContextExt<'input>>; + +pub trait NormalContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn pattern_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + fn pattern(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) + } + /// Retrieves first TerminalNode corresponding to token Assign + /// Returns `None` if there is no child corresponding to token Assign + fn Assign(&self) -> Option>> where Self:Sized{ + self.get_token(Assign, 0) + } +} + +impl<'input> NormalContextAttrs<'input> for NormalContext<'input>{} + +pub struct NormalContextExt<'input>{ + base:StatementContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{NormalContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for NormalContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NormalContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Normal(self); + } +} + +impl<'input> CustomRuleContext<'input> for NormalContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_statement } + //fn type_rule_index() -> usize where Self: Sized { RULE_statement } +} + +impl<'input> Borrow> for NormalContext<'input>{ + fn borrow(&self) -> &StatementContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for NormalContext<'input>{ + fn borrow_mut(&mut self) -> &mut StatementContextExt<'input> { &mut self.base } +} + +impl<'input> StatementContextAttrs<'input> for NormalContext<'input> {} + +impl<'input> NormalContextExt<'input>{ + fn new(ctx: &dyn StatementContextAttrs<'input>) -> Rc> { + Rc::new( + StatementContextAll::NormalContext( + BaseParserRuleContext::copy_from(ctx,NormalContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type MatchContext<'input> = BaseParserRuleContext<'input,MatchContextExt<'input>>; + +pub trait MatchContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Assert + /// Returns `None` if there is no child corresponding to token Assert + fn Assert(&self) -> Option>> where Self:Sized{ + self.get_token(Assert, 0) + } + fn pattern_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + fn pattern(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) + } + /// Retrieves first TerminalNode corresponding to token Matches + /// Returns `None` if there is no child corresponding to token Matches + fn Matches(&self) -> 
Option>> where Self:Sized{ + self.get_token(Matches, 0) + } +} + +impl<'input> MatchContextAttrs<'input> for MatchContext<'input>{} + +pub struct MatchContextExt<'input>{ + base:StatementContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{MatchContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for MatchContext<'input>{} + +impl<'input,'a> Listenable + 'a> for MatchContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Match(self); + } +} + +impl<'input> CustomRuleContext<'input> for MatchContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_statement } + //fn type_rule_index() -> usize where Self: Sized { RULE_statement } +} + +impl<'input> Borrow> for MatchContext<'input>{ + fn borrow(&self) -> &StatementContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for MatchContext<'input>{ + fn borrow_mut(&mut self) -> &mut StatementContextExt<'input> { &mut self.base } +} + +impl<'input> StatementContextAttrs<'input> for MatchContext<'input> {} + +impl<'input> MatchContextExt<'input>{ + fn new(ctx: &dyn StatementContextAttrs<'input>) -> Rc> { + Rc::new( + StatementContextAll::MatchContext( + BaseParserRuleContext::copy_from(ctx,MatchContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn statement(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = StatementContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 8, RULE_statement); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(135); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(10,&mut recog.base)? 
{ + 1 =>{ + let tmp = NormalContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + /*InvokeRule pattern*/ + recog.base.set_state(124); + recog.pattern()?; + + recog.base.set_state(125); + recog.base.match_token(Assign,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(126); + recog.pattern()?; + + } + } + , + 2 =>{ + let tmp = MatchContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(128); + recog.base.match_token(Assert,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(129); + recog.pattern()?; + + recog.base.set_state(130); + recog.base.match_token(Matches,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(131); + recog.pattern()?; + + } + } + , + 3 =>{ + let tmp = AssertContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 3); + _localctx = tmp; + { + recog.base.set_state(133); + recog.base.match_token(Assert,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(134); + recog.pattern()?; + + } + } + + _ => {} + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- pattern ---------------- +pub type PatternContextAll<'input> = PatternContext<'input>; + + +pub type PatternContext<'input> = BaseParserRuleContext<'input,PatternContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_pattern(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_pattern(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_pattern } + //fn type_rule_index() -> usize where Self: Sized { RULE_pattern } +} +antlr_rust::tid!{PatternContextExt<'a>} + +impl<'input> PatternContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternOr(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} + +} + +impl<'input> PatternContextAttrs<'input> for PatternContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn pattern(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 10, RULE_pattern); + let mut _localctx: Rc = _localctx; + let 
result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternOr*/ + recog.base.set_state(137); + recog.patternOr()?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternOr ---------------- +pub type PatternOrContextAll<'input> = PatternOrContext<'input>; + + +pub type PatternOrContext<'input> = BaseParserRuleContext<'input,PatternOrContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternOrContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternOrContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternOrContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_patternOr(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_patternOr(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternOrContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternOr } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternOr } +} +antlr_rust::tid!{PatternOrContextExt<'a>} + +impl<'input> PatternOrContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternOrContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternOrContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternAnd_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn patternAnd(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn operatorOr_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn operatorOr(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> PatternOrContextAttrs<'input> for PatternOrContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternOr(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternOrContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 12, RULE_patternOr); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternAnd*/ + recog.base.set_state(139); + recog.patternAnd()?; + + recog.base.set_state(145); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(11,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule operatorOr*/ + recog.base.set_state(140); + recog.operatorOr()?; + + /*InvokeRule patternAnd*/ + recog.base.set_state(141); + recog.patternAnd()?; + + } + } + } + recog.base.set_state(147); + 
recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(11,&mut recog.base)?; + } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- operatorOr ---------------- +#[derive(Debug)] +pub enum OperatorOrContextAll<'input>{ + OrContext(OrContext<'input>), +Error(OperatorOrContext<'input>) +} +antlr_rust::tid!{OperatorOrContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for OperatorOrContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorOrContextAll<'input>{} + +impl<'input> Deref for OperatorOrContextAll<'input>{ + type Target = dyn OperatorOrContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use OperatorOrContextAll::*; + match self{ + OrContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for OperatorOrContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type OperatorOrContext<'input> = BaseParserRuleContext<'input,OperatorOrContextExt<'input>>; + +#[derive(Clone)] +pub struct OperatorOrContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorOrContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OperatorOrContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for OperatorOrContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorOr } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorOr } +} +antlr_rust::tid!{OperatorOrContextExt<'a>} + +impl<'input> OperatorOrContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + OperatorOrContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,OperatorOrContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait OperatorOrContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> OperatorOrContextAttrs<'input> for OperatorOrContext<'input>{} + +pub type OrContext<'input> = BaseParserRuleContext<'input,OrContextExt<'input>>; + +pub trait OrContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token BooleanOr + /// Returns `None` if there is no child corresponding to token BooleanOr + fn BooleanOr(&self) -> Option>> where Self:Sized{ + self.get_token(BooleanOr, 0) + } +} + +impl<'input> OrContextAttrs<'input> for OrContext<'input>{} + +pub struct OrContextExt<'input>{ + base:OperatorOrContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{OrContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for OrContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OrContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Or(self); + } +} + +impl<'input> CustomRuleContext<'input> for OrContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = 
SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorOr } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorOr } +} + +impl<'input> Borrow> for OrContext<'input>{ + fn borrow(&self) -> &OperatorOrContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for OrContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorOrContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorOrContextAttrs<'input> for OrContext<'input> {} + +impl<'input> OrContextExt<'input>{ + fn new(ctx: &dyn OperatorOrContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorOrContextAll::OrContext( + BaseParserRuleContext::copy_from(ctx,OrContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn operatorOr(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = OperatorOrContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 14, RULE_operatorOr); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let tmp = OrContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(148); + recog.base.match_token(BooleanOr,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternAnd ---------------- +pub type PatternAndContextAll<'input> = PatternAndContext<'input>; + + +pub type PatternAndContext<'input> = BaseParserRuleContext<'input,PatternAndContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternAndContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternAndContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternAndContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_patternAnd(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_patternAnd(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternAndContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternAnd } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternAnd } +} +antlr_rust::tid!{PatternAndContextExt<'a>} + +impl<'input> PatternAndContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternAndContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternAndContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternEqNeq_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn patternEqNeq(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn operatorAnd_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn 
operatorAnd(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> PatternAndContextAttrs<'input> for PatternAndContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternAnd(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternAndContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 16, RULE_patternAnd); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternEqNeq*/ + recog.base.set_state(150); + recog.patternEqNeq()?; + + recog.base.set_state(156); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(12,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule operatorAnd*/ + recog.base.set_state(151); + recog.operatorAnd()?; + + /*InvokeRule patternEqNeq*/ + recog.base.set_state(152); + recog.patternEqNeq()?; + + } + } + } + recog.base.set_state(158); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(12,&mut recog.base)?; + } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- operatorAnd ---------------- +#[derive(Debug)] +pub enum OperatorAndContextAll<'input>{ + AndContext(AndContext<'input>), +Error(OperatorAndContext<'input>) +} +antlr_rust::tid!{OperatorAndContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for OperatorAndContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorAndContextAll<'input>{} + +impl<'input> Deref for OperatorAndContextAll<'input>{ + type Target = dyn OperatorAndContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use OperatorAndContextAll::*; + match self{ + AndContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for OperatorAndContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type OperatorAndContext<'input> = BaseParserRuleContext<'input,OperatorAndContextExt<'input>>; + +#[derive(Clone)] +pub struct OperatorAndContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorAndContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OperatorAndContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for OperatorAndContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorAnd } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorAnd } +} +antlr_rust::tid!{OperatorAndContextExt<'a>} + +impl<'input> OperatorAndContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> 
{ + Rc::new( + OperatorAndContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,OperatorAndContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait OperatorAndContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> OperatorAndContextAttrs<'input> for OperatorAndContext<'input>{} + +pub type AndContext<'input> = BaseParserRuleContext<'input,AndContextExt<'input>>; + +pub trait AndContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token BooleanAnd + /// Returns `None` if there is no child corresponding to token BooleanAnd + fn BooleanAnd(&self) -> Option>> where Self:Sized{ + self.get_token(BooleanAnd, 0) + } +} + +impl<'input> AndContextAttrs<'input> for AndContext<'input>{} + +pub struct AndContextExt<'input>{ + base:OperatorAndContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{AndContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for AndContext<'input>{} + +impl<'input,'a> Listenable + 'a> for AndContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_And(self); + } +} + +impl<'input> CustomRuleContext<'input> for AndContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorAnd } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorAnd } +} + +impl<'input> Borrow> for AndContext<'input>{ + fn borrow(&self) -> &OperatorAndContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for AndContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorAndContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorAndContextAttrs<'input> for AndContext<'input> {} + +impl<'input> AndContextExt<'input>{ + fn new(ctx: &dyn OperatorAndContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorAndContextAll::AndContext( + BaseParserRuleContext::copy_from(ctx,AndContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn operatorAnd(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = OperatorAndContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 18, RULE_operatorAnd); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let tmp = AndContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(159); + recog.base.match_token(BooleanAnd,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternEqNeq ---------------- +pub type PatternEqNeqContextAll<'input> = PatternEqNeqContext<'input>; + + +pub type PatternEqNeqContext<'input> = BaseParserRuleContext<'input,PatternEqNeqContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternEqNeqContextExt<'input>{ +ph:PhantomData<&'input str> +} + 
+impl<'input> SubstraitTypeParserContext<'input> for PatternEqNeqContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternEqNeqContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_patternEqNeq(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_patternEqNeq(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternEqNeqContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternEqNeq } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternEqNeq } +} +antlr_rust::tid!{PatternEqNeqContextExt<'a>} + +impl<'input> PatternEqNeqContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternEqNeqContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternEqNeqContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternIneq_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn patternIneq(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn operatorEqNeq_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn operatorEqNeq(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> PatternEqNeqContextAttrs<'input> for PatternEqNeqContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternEqNeq(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternEqNeqContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 20, RULE_patternEqNeq); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternIneq*/ + recog.base.set_state(161); + recog.patternIneq()?; + + recog.base.set_state(167); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(13,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule operatorEqNeq*/ + recog.base.set_state(162); + recog.operatorEqNeq()?; + + /*InvokeRule patternIneq*/ + recog.base.set_state(163); + recog.patternIneq()?; + + } + } + } + recog.base.set_state(169); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(13,&mut recog.base)?; + } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- operatorEqNeq ---------------- +#[derive(Debug)] +pub enum OperatorEqNeqContextAll<'input>{ + NeqContext(NeqContext<'input>), + EqContext(EqContext<'input>), +Error(OperatorEqNeqContext<'input>) +} +antlr_rust::tid!{OperatorEqNeqContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for 
OperatorEqNeqContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorEqNeqContextAll<'input>{} + +impl<'input> Deref for OperatorEqNeqContextAll<'input>{ + type Target = dyn OperatorEqNeqContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use OperatorEqNeqContextAll::*; + match self{ + NeqContext(inner) => inner, + EqContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for OperatorEqNeqContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type OperatorEqNeqContext<'input> = BaseParserRuleContext<'input,OperatorEqNeqContextExt<'input>>; + +#[derive(Clone)] +pub struct OperatorEqNeqContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorEqNeqContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OperatorEqNeqContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for OperatorEqNeqContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorEqNeq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorEqNeq } +} +antlr_rust::tid!{OperatorEqNeqContextExt<'a>} + +impl<'input> OperatorEqNeqContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + OperatorEqNeqContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,OperatorEqNeqContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait OperatorEqNeqContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> OperatorEqNeqContextAttrs<'input> for OperatorEqNeqContext<'input>{} + +pub type NeqContext<'input> = BaseParserRuleContext<'input,NeqContextExt<'input>>; + +pub trait NeqContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token NotEqual + /// Returns `None` if there is no child corresponding to token NotEqual + fn NotEqual(&self) -> Option>> where Self:Sized{ + self.get_token(NotEqual, 0) + } +} + +impl<'input> NeqContextAttrs<'input> for NeqContext<'input>{} + +pub struct NeqContextExt<'input>{ + base:OperatorEqNeqContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{NeqContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for NeqContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NeqContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Neq(self); + } +} + +impl<'input> CustomRuleContext<'input> for NeqContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorEqNeq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorEqNeq } +} + +impl<'input> Borrow> for NeqContext<'input>{ + fn borrow(&self) -> &OperatorEqNeqContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for NeqContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorEqNeqContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorEqNeqContextAttrs<'input> for NeqContext<'input> {} + +impl<'input> NeqContextExt<'input>{ + fn new(ctx: &dyn OperatorEqNeqContextAttrs<'input>) -> Rc> { + Rc::new( + 
OperatorEqNeqContextAll::NeqContext( + BaseParserRuleContext::copy_from(ctx,NeqContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type EqContext<'input> = BaseParserRuleContext<'input,EqContextExt<'input>>; + +pub trait EqContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Equal + /// Returns `None` if there is no child corresponding to token Equal + fn Equal(&self) -> Option>> where Self:Sized{ + self.get_token(Equal, 0) + } +} + +impl<'input> EqContextAttrs<'input> for EqContext<'input>{} + +pub struct EqContextExt<'input>{ + base:OperatorEqNeqContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{EqContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for EqContext<'input>{} + +impl<'input,'a> Listenable + 'a> for EqContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Eq(self); + } +} + +impl<'input> CustomRuleContext<'input> for EqContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorEqNeq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorEqNeq } +} + +impl<'input> Borrow> for EqContext<'input>{ + fn borrow(&self) -> &OperatorEqNeqContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for EqContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorEqNeqContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorEqNeqContextAttrs<'input> for EqContext<'input> {} + +impl<'input> EqContextExt<'input>{ + fn new(ctx: &dyn OperatorEqNeqContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorEqNeqContextAll::EqContext( + BaseParserRuleContext::copy_from(ctx,EqContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn operatorEqNeq(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = OperatorEqNeqContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 22, RULE_operatorEqNeq); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(172); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + Equal + => { + let tmp = EqContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(170); + recog.base.match_token(Equal,&mut recog.err_handler)?; + + } + } + + NotEqual + => { + let tmp = NeqContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(171); + recog.base.match_token(NotEqual,&mut recog.err_handler)?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? 
+ } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternIneq ---------------- +pub type PatternIneqContextAll<'input> = PatternIneqContext<'input>; + + +pub type PatternIneqContext<'input> = BaseParserRuleContext<'input,PatternIneqContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternIneqContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternIneqContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternIneqContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_patternIneq(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_patternIneq(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternIneqContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternIneq } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternIneq } +} +antlr_rust::tid!{PatternIneqContextExt<'a>} + +impl<'input> PatternIneqContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternIneqContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternIneqContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternAddSub_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn patternAddSub(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn operatorIneq_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn operatorIneq(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> PatternIneqContextAttrs<'input> for PatternIneqContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternIneq(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternIneqContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 24, RULE_patternIneq); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternAddSub*/ + recog.base.set_state(174); + recog.patternAddSub()?; + + recog.base.set_state(180); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(15,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule operatorIneq*/ + recog.base.set_state(175); + recog.operatorIneq()?; + + /*InvokeRule patternAddSub*/ + recog.base.set_state(176); + recog.patternAddSub()?; + + } + } + } + recog.base.set_state(182); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(15,&mut recog.base)?; + } + } + Ok(()) + })(); + match result { 
+ Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- operatorIneq ---------------- +#[derive(Debug)] +pub enum OperatorIneqContextAll<'input>{ + LtContext(LtContext<'input>), + LeContext(LeContext<'input>), + GtContext(GtContext<'input>), + GeContext(GeContext<'input>), +Error(OperatorIneqContext<'input>) +} +antlr_rust::tid!{OperatorIneqContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for OperatorIneqContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorIneqContextAll<'input>{} + +impl<'input> Deref for OperatorIneqContextAll<'input>{ + type Target = dyn OperatorIneqContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use OperatorIneqContextAll::*; + match self{ + LtContext(inner) => inner, + LeContext(inner) => inner, + GtContext(inner) => inner, + GeContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for OperatorIneqContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type OperatorIneqContext<'input> = BaseParserRuleContext<'input,OperatorIneqContextExt<'input>>; + +#[derive(Clone)] +pub struct OperatorIneqContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorIneqContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OperatorIneqContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for OperatorIneqContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorIneq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorIneq } +} +antlr_rust::tid!{OperatorIneqContextExt<'a>} + +impl<'input> OperatorIneqContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + OperatorIneqContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,OperatorIneqContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait OperatorIneqContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> OperatorIneqContextAttrs<'input> for OperatorIneqContext<'input>{} + +pub type LtContext<'input> = BaseParserRuleContext<'input,LtContextExt<'input>>; + +pub trait LtContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token LessThan + /// Returns `None` if there is no child corresponding to token LessThan + fn LessThan(&self) -> Option>> where Self:Sized{ + self.get_token(LessThan, 0) + } +} + +impl<'input> LtContextAttrs<'input> for LtContext<'input>{} + +pub struct LtContextExt<'input>{ + base:OperatorIneqContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{LtContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for LtContext<'input>{} + +impl<'input,'a> Listenable + 'a> for LtContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Lt(self); + } +} + +impl<'input> CustomRuleContext<'input> for LtContextExt<'input>{ + 
type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorIneq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorIneq } +} + +impl<'input> Borrow> for LtContext<'input>{ + fn borrow(&self) -> &OperatorIneqContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for LtContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorIneqContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorIneqContextAttrs<'input> for LtContext<'input> {} + +impl<'input> LtContextExt<'input>{ + fn new(ctx: &dyn OperatorIneqContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorIneqContextAll::LtContext( + BaseParserRuleContext::copy_from(ctx,LtContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type LeContext<'input> = BaseParserRuleContext<'input,LeContextExt<'input>>; + +pub trait LeContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token LessEqual + /// Returns `None` if there is no child corresponding to token LessEqual + fn LessEqual(&self) -> Option>> where Self:Sized{ + self.get_token(LessEqual, 0) + } +} + +impl<'input> LeContextAttrs<'input> for LeContext<'input>{} + +pub struct LeContextExt<'input>{ + base:OperatorIneqContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{LeContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for LeContext<'input>{} + +impl<'input,'a> Listenable + 'a> for LeContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Le(self); + } +} + +impl<'input> CustomRuleContext<'input> for LeContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorIneq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorIneq } +} + +impl<'input> Borrow> for LeContext<'input>{ + fn borrow(&self) -> &OperatorIneqContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for LeContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorIneqContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorIneqContextAttrs<'input> for LeContext<'input> {} + +impl<'input> LeContextExt<'input>{ + fn new(ctx: &dyn OperatorIneqContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorIneqContextAll::LeContext( + BaseParserRuleContext::copy_from(ctx,LeContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type GtContext<'input> = BaseParserRuleContext<'input,GtContextExt<'input>>; + +pub trait GtContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token GreaterThan + /// Returns `None` if there is no child corresponding to token GreaterThan + fn GreaterThan(&self) -> Option>> where Self:Sized{ + self.get_token(GreaterThan, 0) + } +} + +impl<'input> GtContextAttrs<'input> for GtContext<'input>{} + +pub struct GtContextExt<'input>{ + base:OperatorIneqContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{GtContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for GtContext<'input>{} + +impl<'input,'a> Listenable + 'a> for GtContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Gt(self); + } +} + +impl<'input> CustomRuleContext<'input> for GtContextExt<'input>{ + type TF = 
LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorIneq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorIneq } +} + +impl<'input> Borrow> for GtContext<'input>{ + fn borrow(&self) -> &OperatorIneqContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for GtContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorIneqContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorIneqContextAttrs<'input> for GtContext<'input> {} + +impl<'input> GtContextExt<'input>{ + fn new(ctx: &dyn OperatorIneqContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorIneqContextAll::GtContext( + BaseParserRuleContext::copy_from(ctx,GtContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type GeContext<'input> = BaseParserRuleContext<'input,GeContextExt<'input>>; + +pub trait GeContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token GreaterEqual + /// Returns `None` if there is no child corresponding to token GreaterEqual + fn GreaterEqual(&self) -> Option>> where Self:Sized{ + self.get_token(GreaterEqual, 0) + } +} + +impl<'input> GeContextAttrs<'input> for GeContext<'input>{} + +pub struct GeContextExt<'input>{ + base:OperatorIneqContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{GeContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for GeContext<'input>{} + +impl<'input,'a> Listenable + 'a> for GeContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Ge(self); + } +} + +impl<'input> CustomRuleContext<'input> for GeContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorIneq } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorIneq } +} + +impl<'input> Borrow> for GeContext<'input>{ + fn borrow(&self) -> &OperatorIneqContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for GeContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorIneqContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorIneqContextAttrs<'input> for GeContext<'input> {} + +impl<'input> GeContextExt<'input>{ + fn new(ctx: &dyn OperatorIneqContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorIneqContextAll::GeContext( + BaseParserRuleContext::copy_from(ctx,GeContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn operatorIneq(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = OperatorIneqContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 26, RULE_operatorIneq); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(187); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + LessThan + => { + let tmp = LtContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(183); + recog.base.match_token(LessThan,&mut recog.err_handler)?; + + } + } + + LessEqual + => { + let tmp = LeContextExt::new(&**_localctx); + 
recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(184); + recog.base.match_token(LessEqual,&mut recog.err_handler)?; + + } + } + + GreaterThan + => { + let tmp = GtContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 3); + _localctx = tmp; + { + recog.base.set_state(185); + recog.base.match_token(GreaterThan,&mut recog.err_handler)?; + + } + } + + GreaterEqual + => { + let tmp = GeContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 4); + _localctx = tmp; + { + recog.base.set_state(186); + recog.base.match_token(GreaterEqual,&mut recog.err_handler)?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternAddSub ---------------- +pub type PatternAddSubContextAll<'input> = PatternAddSubContext<'input>; + + +pub type PatternAddSubContext<'input> = BaseParserRuleContext<'input,PatternAddSubContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternAddSubContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternAddSubContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternAddSubContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_patternAddSub(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_patternAddSub(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternAddSubContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternAddSub } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternAddSub } +} +antlr_rust::tid!{PatternAddSubContextExt<'a>} + +impl<'input> PatternAddSubContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternAddSubContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternAddSubContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternMulDiv_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn patternMulDiv(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn operatorAddSub_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn operatorAddSub(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> PatternAddSubContextAttrs<'input> for PatternAddSubContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternAddSub(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternAddSubContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 28, RULE_patternAddSub); + let mut _localctx: Rc = _localctx; + let result: Result<(), 
ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternMulDiv*/ + recog.base.set_state(189); + recog.patternMulDiv()?; + + recog.base.set_state(195); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(17,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule operatorAddSub*/ + recog.base.set_state(190); + recog.operatorAddSub()?; + + /*InvokeRule patternMulDiv*/ + recog.base.set_state(191); + recog.patternMulDiv()?; + + } + } + } + recog.base.set_state(197); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(17,&mut recog.base)?; + } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- operatorAddSub ---------------- +#[derive(Debug)] +pub enum OperatorAddSubContextAll<'input>{ + AddContext(AddContext<'input>), + SubContext(SubContext<'input>), +Error(OperatorAddSubContext<'input>) +} +antlr_rust::tid!{OperatorAddSubContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for OperatorAddSubContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorAddSubContextAll<'input>{} + +impl<'input> Deref for OperatorAddSubContextAll<'input>{ + type Target = dyn OperatorAddSubContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use OperatorAddSubContextAll::*; + match self{ + AddContext(inner) => inner, + SubContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for OperatorAddSubContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type OperatorAddSubContext<'input> = BaseParserRuleContext<'input,OperatorAddSubContextExt<'input>>; + +#[derive(Clone)] +pub struct OperatorAddSubContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorAddSubContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OperatorAddSubContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for OperatorAddSubContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorAddSub } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorAddSub } +} +antlr_rust::tid!{OperatorAddSubContextExt<'a>} + +impl<'input> OperatorAddSubContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + OperatorAddSubContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,OperatorAddSubContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait OperatorAddSubContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> OperatorAddSubContextAttrs<'input> for OperatorAddSubContext<'input>{} + +pub type AddContext<'input> = BaseParserRuleContext<'input,AddContextExt<'input>>; + +pub trait AddContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode 
corresponding to token Plus + /// Returns `None` if there is no child corresponding to token Plus + fn Plus(&self) -> Option>> where Self:Sized{ + self.get_token(Plus, 0) + } +} + +impl<'input> AddContextAttrs<'input> for AddContext<'input>{} + +pub struct AddContextExt<'input>{ + base:OperatorAddSubContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{AddContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for AddContext<'input>{} + +impl<'input,'a> Listenable + 'a> for AddContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Add(self); + } +} + +impl<'input> CustomRuleContext<'input> for AddContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorAddSub } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorAddSub } +} + +impl<'input> Borrow> for AddContext<'input>{ + fn borrow(&self) -> &OperatorAddSubContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for AddContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorAddSubContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorAddSubContextAttrs<'input> for AddContext<'input> {} + +impl<'input> AddContextExt<'input>{ + fn new(ctx: &dyn OperatorAddSubContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorAddSubContextAll::AddContext( + BaseParserRuleContext::copy_from(ctx,AddContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type SubContext<'input> = BaseParserRuleContext<'input,SubContextExt<'input>>; + +pub trait SubContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Minus + /// Returns `None` if there is no child corresponding to token Minus + fn Minus(&self) -> Option>> where Self:Sized{ + self.get_token(Minus, 0) + } +} + +impl<'input> SubContextAttrs<'input> for SubContext<'input>{} + +pub struct SubContextExt<'input>{ + base:OperatorAddSubContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{SubContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for SubContext<'input>{} + +impl<'input,'a> Listenable + 'a> for SubContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Sub(self); + } +} + +impl<'input> CustomRuleContext<'input> for SubContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorAddSub } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorAddSub } +} + +impl<'input> Borrow> for SubContext<'input>{ + fn borrow(&self) -> &OperatorAddSubContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for SubContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorAddSubContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorAddSubContextAttrs<'input> for SubContext<'input> {} + +impl<'input> SubContextExt<'input>{ + fn new(ctx: &dyn OperatorAddSubContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorAddSubContextAll::SubContext( + BaseParserRuleContext::copy_from(ctx,SubContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ 
+ pub fn operatorAddSub(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = OperatorAddSubContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 30, RULE_operatorAddSub); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(200); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + Plus + => { + let tmp = AddContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(198); + recog.base.match_token(Plus,&mut recog.err_handler)?; + + } + } + + Minus + => { + let tmp = SubContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(199); + recog.base.match_token(Minus,&mut recog.err_handler)?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternMulDiv ---------------- +pub type PatternMulDivContextAll<'input> = PatternMulDivContext<'input>; + + +pub type PatternMulDivContext<'input> = BaseParserRuleContext<'input,PatternMulDivContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternMulDivContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternMulDivContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternMulDivContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_patternMulDiv(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_patternMulDiv(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for PatternMulDivContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMulDiv } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMulDiv } +} +antlr_rust::tid!{PatternMulDivContextExt<'a>} + +impl<'input> PatternMulDivContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternMulDivContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait PatternMulDivContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn patternMisc_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn patternMisc(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +fn operatorMulDiv_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn operatorMulDiv(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} + +} + +impl<'input> PatternMulDivContextAttrs<'input> for PatternMulDivContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternMulDiv(&mut self,) + -> Result>,ANTLRError> { + let mut recog = 
self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternMulDivContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 32, RULE_patternMulDiv); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule patternMisc*/ + recog.base.set_state(202); + recog.patternMisc()?; + + recog.base.set_state(208); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(19,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + /*InvokeRule operatorMulDiv*/ + recog.base.set_state(203); + recog.operatorMulDiv()?; + + /*InvokeRule patternMisc*/ + recog.base.set_state(204); + recog.patternMisc()?; + + } + } + } + recog.base.set_state(210); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(19,&mut recog.base)?; + } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- operatorMulDiv ---------------- +#[derive(Debug)] +pub enum OperatorMulDivContextAll<'input>{ + DivContext(DivContext<'input>), + MulContext(MulContext<'input>), +Error(OperatorMulDivContext<'input>) +} +antlr_rust::tid!{OperatorMulDivContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for OperatorMulDivContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorMulDivContextAll<'input>{} + +impl<'input> Deref for OperatorMulDivContextAll<'input>{ + type Target = dyn OperatorMulDivContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use OperatorMulDivContextAll::*; + match self{ + DivContext(inner) => inner, + MulContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for OperatorMulDivContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type OperatorMulDivContext<'input> = BaseParserRuleContext<'input,OperatorMulDivContextExt<'input>>; + +#[derive(Clone)] +pub struct OperatorMulDivContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for OperatorMulDivContext<'input>{} + +impl<'input,'a> Listenable + 'a> for OperatorMulDivContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for OperatorMulDivContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorMulDiv } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorMulDiv } +} +antlr_rust::tid!{OperatorMulDivContextExt<'a>} + +impl<'input> OperatorMulDivContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + OperatorMulDivContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,OperatorMulDivContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait OperatorMulDivContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> 
OperatorMulDivContextAttrs<'input> for OperatorMulDivContext<'input>{} + +pub type DivContext<'input> = BaseParserRuleContext<'input,DivContextExt<'input>>; + +pub trait DivContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Divide + /// Returns `None` if there is no child corresponding to token Divide + fn Divide(&self) -> Option>> where Self:Sized{ + self.get_token(Divide, 0) + } +} + +impl<'input> DivContextAttrs<'input> for DivContext<'input>{} + +pub struct DivContextExt<'input>{ + base:OperatorMulDivContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{DivContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for DivContext<'input>{} + +impl<'input,'a> Listenable + 'a> for DivContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Div(self); + } +} + +impl<'input> CustomRuleContext<'input> for DivContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorMulDiv } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorMulDiv } +} + +impl<'input> Borrow> for DivContext<'input>{ + fn borrow(&self) -> &OperatorMulDivContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for DivContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorMulDivContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorMulDivContextAttrs<'input> for DivContext<'input> {} + +impl<'input> DivContextExt<'input>{ + fn new(ctx: &dyn OperatorMulDivContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorMulDivContextAll::DivContext( + BaseParserRuleContext::copy_from(ctx,DivContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type MulContext<'input> = BaseParserRuleContext<'input,MulContextExt<'input>>; + +pub trait MulContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Multiply + /// Returns `None` if there is no child corresponding to token Multiply + fn Multiply(&self) -> Option>> where Self:Sized{ + self.get_token(Multiply, 0) + } +} + +impl<'input> MulContextAttrs<'input> for MulContext<'input>{} + +pub struct MulContextExt<'input>{ + base:OperatorMulDivContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{MulContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for MulContext<'input>{} + +impl<'input,'a> Listenable + 'a> for MulContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Mul(self); + } +} + +impl<'input> CustomRuleContext<'input> for MulContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_operatorMulDiv } + //fn type_rule_index() -> usize where Self: Sized { RULE_operatorMulDiv } +} + +impl<'input> Borrow> for MulContext<'input>{ + fn borrow(&self) -> &OperatorMulDivContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for MulContext<'input>{ + fn borrow_mut(&mut self) -> &mut OperatorMulDivContextExt<'input> { &mut self.base } +} + +impl<'input> OperatorMulDivContextAttrs<'input> for MulContext<'input> {} + +impl<'input> MulContextExt<'input>{ + fn new(ctx: &dyn OperatorMulDivContextAttrs<'input>) -> Rc> { + Rc::new( + OperatorMulDivContextAll::MulContext( + 
BaseParserRuleContext::copy_from(ctx,MulContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn operatorMulDiv(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = OperatorMulDivContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 34, RULE_operatorMulDiv); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(213); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + Multiply + => { + let tmp = MulContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(211); + recog.base.match_token(Multiply,&mut recog.err_handler)?; + + } + } + + Divide + => { + let tmp = DivContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(212); + recog.base.match_token(Divide,&mut recog.err_handler)?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- patternMisc ---------------- +#[derive(Debug)] +pub enum PatternMiscContextAll<'input>{ + ParenthesesContext(ParenthesesContext<'input>), + IntRangeContext(IntRangeContext<'input>), + UnaryNegateContext(UnaryNegateContext<'input>), + StrExactlyContext(StrExactlyContext<'input>), + IfThenElseContext(IfThenElseContext<'input>), + BoolFalseContext(BoolFalseContext<'input>), + EnumAnyContext(EnumAnyContext<'input>), + DtAnyContext(DtAnyContext<'input>), + AnyContext(AnyContext<'input>), + IntAnyContext(IntAnyContext<'input>), + InconsistentContext(InconsistentContext<'input>), + DatatypeBindingOrConstantContext(DatatypeBindingOrConstantContext<'input>), + EnumSetContext(EnumSetContext<'input>), + StrAnyContext(StrAnyContext<'input>), + BoolTrueContext(BoolTrueContext<'input>), + IntAtMostContext(IntAtMostContext<'input>), + IntAtLeastContext(IntAtLeastContext<'input>), + IntExactlyContext(IntExactlyContext<'input>), + FunctionContext(FunctionContext<'input>), + BoolAnyContext(BoolAnyContext<'input>), + UnaryNotContext(UnaryNotContext<'input>), +Error(PatternMiscContext<'input>) +} +antlr_rust::tid!{PatternMiscContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for PatternMiscContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for PatternMiscContextAll<'input>{} + +impl<'input> Deref for PatternMiscContextAll<'input>{ + type Target = dyn PatternMiscContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use PatternMiscContextAll::*; + match self{ + ParenthesesContext(inner) => inner, + IntRangeContext(inner) => inner, + UnaryNegateContext(inner) => inner, + StrExactlyContext(inner) => inner, + IfThenElseContext(inner) => inner, + BoolFalseContext(inner) => inner, + EnumAnyContext(inner) => inner, + DtAnyContext(inner) => inner, + AnyContext(inner) => inner, + IntAnyContext(inner) => 
inner, + InconsistentContext(inner) => inner, + DatatypeBindingOrConstantContext(inner) => inner, + EnumSetContext(inner) => inner, + StrAnyContext(inner) => inner, + BoolTrueContext(inner) => inner, + IntAtMostContext(inner) => inner, + IntAtLeastContext(inner) => inner, + IntExactlyContext(inner) => inner, + FunctionContext(inner) => inner, + BoolAnyContext(inner) => inner, + UnaryNotContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for PatternMiscContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type PatternMiscContext<'input> = BaseParserRuleContext<'input,PatternMiscContextExt<'input>>; + +#[derive(Clone)] +pub struct PatternMiscContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for PatternMiscContext<'input>{} + +impl<'input,'a> Listenable + 'a> for PatternMiscContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for PatternMiscContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} +antlr_rust::tid!{PatternMiscContextExt<'a>} + +impl<'input> PatternMiscContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + PatternMiscContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,PatternMiscContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait PatternMiscContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> PatternMiscContextAttrs<'input> for PatternMiscContext<'input>{} + +pub type ParenthesesContext<'input> = BaseParserRuleContext<'input,ParenthesesContextExt<'input>>; + +pub trait ParenthesesContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token OpenParen + /// Returns `None` if there is no child corresponding to token OpenParen + fn OpenParen(&self) -> Option>> where Self:Sized{ + self.get_token(OpenParen, 0) + } + fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } + /// Retrieves first TerminalNode corresponding to token CloseParen + /// Returns `None` if there is no child corresponding to token CloseParen + fn CloseParen(&self) -> Option>> where Self:Sized{ + self.get_token(CloseParen, 0) + } +} + +impl<'input> ParenthesesContextAttrs<'input> for ParenthesesContext<'input>{} + +pub struct ParenthesesContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{ParenthesesContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for ParenthesesContext<'input>{} + +impl<'input,'a> Listenable + 'a> for ParenthesesContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_parentheses(self); + } +} + +impl<'input> CustomRuleContext<'input> for ParenthesesContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for ParenthesesContext<'input>{ + fn borrow(&self) -> 
&PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for ParenthesesContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for ParenthesesContext<'input> {} + +impl<'input> ParenthesesContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::ParenthesesContext( + BaseParserRuleContext::copy_from(ctx,ParenthesesContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type IntRangeContext<'input> = BaseParserRuleContext<'input,IntRangeContextExt<'input>>; + +pub trait IntRangeContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn integer_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + fn integer(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) + } + /// Retrieves first TerminalNode corresponding to token Range + /// Returns `None` if there is no child corresponding to token Range + fn Range(&self) -> Option>> where Self:Sized{ + self.get_token(Range, 0) + } +} + +impl<'input> IntRangeContextAttrs<'input> for IntRangeContext<'input>{} + +pub struct IntRangeContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IntRangeContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IntRangeContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IntRangeContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_intRange(self); + } +} + +impl<'input> CustomRuleContext<'input> for IntRangeContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for IntRangeContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IntRangeContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for IntRangeContext<'input> {} + +impl<'input> IntRangeContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::IntRangeContext( + BaseParserRuleContext::copy_from(ctx,IntRangeContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type UnaryNegateContext<'input> = BaseParserRuleContext<'input,UnaryNegateContextExt<'input>>; + +pub trait UnaryNegateContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Minus + /// Returns `None` if there is no child corresponding to token Minus + fn Minus(&self) -> Option>> where Self:Sized{ + self.get_token(Minus, 0) + } + fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> UnaryNegateContextAttrs<'input> for UnaryNegateContext<'input>{} + +pub struct UnaryNegateContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{UnaryNegateContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for UnaryNegateContext<'input>{} + +impl<'input,'a> Listenable + 'a> for UnaryNegateContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + 
listener.enter_every_rule(self); + listener.enter_unaryNegate(self); + } +} + +impl<'input> CustomRuleContext<'input> for UnaryNegateContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for UnaryNegateContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for UnaryNegateContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for UnaryNegateContext<'input> {} + +impl<'input> UnaryNegateContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::UnaryNegateContext( + BaseParserRuleContext::copy_from(ctx,UnaryNegateContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type StrExactlyContext<'input> = BaseParserRuleContext<'input,StrExactlyContextExt<'input>>; + +pub trait StrExactlyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token String + /// Returns `None` if there is no child corresponding to token String + fn String(&self) -> Option>> where Self:Sized{ + self.get_token(String, 0) + } +} + +impl<'input> StrExactlyContextAttrs<'input> for StrExactlyContext<'input>{} + +pub struct StrExactlyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{StrExactlyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for StrExactlyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for StrExactlyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_strExactly(self); + } +} + +impl<'input> CustomRuleContext<'input> for StrExactlyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for StrExactlyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for StrExactlyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for StrExactlyContext<'input> {} + +impl<'input> StrExactlyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::StrExactlyContext( + BaseParserRuleContext::copy_from(ctx,StrExactlyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type IfThenElseContext<'input> = BaseParserRuleContext<'input,IfThenElseContextExt<'input>>; + +pub trait IfThenElseContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token If + /// Returns `None` if there is no child corresponding to token If + fn If(&self) -> Option>> where Self:Sized{ + self.get_token(If, 0) + } + fn pattern_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + fn pattern(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) + } + /// Retrieves first TerminalNode corresponding to token Then + /// Returns `None` if there is no child 
corresponding to token Then + fn Then(&self) -> Option>> where Self:Sized{ + self.get_token(Then, 0) + } + /// Retrieves first TerminalNode corresponding to token Else + /// Returns `None` if there is no child corresponding to token Else + fn Else(&self) -> Option>> where Self:Sized{ + self.get_token(Else, 0) + } +} + +impl<'input> IfThenElseContextAttrs<'input> for IfThenElseContext<'input>{} + +pub struct IfThenElseContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IfThenElseContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IfThenElseContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IfThenElseContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_ifThenElse(self); + } +} + +impl<'input> CustomRuleContext<'input> for IfThenElseContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for IfThenElseContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IfThenElseContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for IfThenElseContext<'input> {} + +impl<'input> IfThenElseContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::IfThenElseContext( + BaseParserRuleContext::copy_from(ctx,IfThenElseContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type BoolFalseContext<'input> = BaseParserRuleContext<'input,BoolFalseContextExt<'input>>; + +pub trait BoolFalseContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token False + /// Returns `None` if there is no child corresponding to token False + fn False(&self) -> Option>> where Self:Sized{ + self.get_token(False, 0) + } +} + +impl<'input> BoolFalseContextAttrs<'input> for BoolFalseContext<'input>{} + +pub struct BoolFalseContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{BoolFalseContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for BoolFalseContext<'input>{} + +impl<'input,'a> Listenable + 'a> for BoolFalseContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_boolFalse(self); + } +} + +impl<'input> CustomRuleContext<'input> for BoolFalseContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for BoolFalseContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for BoolFalseContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for BoolFalseContext<'input> {} + +impl<'input> BoolFalseContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::BoolFalseContext( + 
BaseParserRuleContext::copy_from(ctx,BoolFalseContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type EnumAnyContext<'input> = BaseParserRuleContext<'input,EnumAnyContextExt<'input>>; + +pub trait EnumAnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Metaenum + /// Returns `None` if there is no child corresponding to token Metaenum + fn Metaenum(&self) -> Option>> where Self:Sized{ + self.get_token(Metaenum, 0) + } +} + +impl<'input> EnumAnyContextAttrs<'input> for EnumAnyContext<'input>{} + +pub struct EnumAnyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{EnumAnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for EnumAnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for EnumAnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_enumAny(self); + } +} + +impl<'input> CustomRuleContext<'input> for EnumAnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for EnumAnyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for EnumAnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for EnumAnyContext<'input> {} + +impl<'input> EnumAnyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::EnumAnyContext( + BaseParserRuleContext::copy_from(ctx,EnumAnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type DtAnyContext<'input> = BaseParserRuleContext<'input,DtAnyContextExt<'input>>; + +pub trait DtAnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Typename + /// Returns `None` if there is no child corresponding to token Typename + fn Typename(&self) -> Option>> where Self:Sized{ + self.get_token(Typename, 0) + } +} + +impl<'input> DtAnyContextAttrs<'input> for DtAnyContext<'input>{} + +pub struct DtAnyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{DtAnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for DtAnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for DtAnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_dtAny(self); + } +} + +impl<'input> CustomRuleContext<'input> for DtAnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for DtAnyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for DtAnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for DtAnyContext<'input> {} + +impl<'input> DtAnyContextExt<'input>{ + fn new(ctx: &dyn 
PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::DtAnyContext( + BaseParserRuleContext::copy_from(ctx,DtAnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type AnyContext<'input> = BaseParserRuleContext<'input,AnyContextExt<'input>>; + +pub trait AnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Question + /// Returns `None` if there is no child corresponding to token Question + fn Question(&self) -> Option>> where Self:Sized{ + self.get_token(Question, 0) + } +} + +impl<'input> AnyContextAttrs<'input> for AnyContext<'input>{} + +pub struct AnyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{AnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for AnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for AnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_any(self); + } +} + +impl<'input> CustomRuleContext<'input> for AnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for AnyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for AnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for AnyContext<'input> {} + +impl<'input> AnyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::AnyContext( + BaseParserRuleContext::copy_from(ctx,AnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type IntAnyContext<'input> = BaseParserRuleContext<'input,IntAnyContextExt<'input>>; + +pub trait IntAnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Metaint + /// Returns `None` if there is no child corresponding to token Metaint + fn Metaint(&self) -> Option>> where Self:Sized{ + self.get_token(Metaint, 0) + } +} + +impl<'input> IntAnyContextAttrs<'input> for IntAnyContext<'input>{} + +pub struct IntAnyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IntAnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IntAnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IntAnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_intAny(self); + } +} + +impl<'input> CustomRuleContext<'input> for IntAnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for IntAnyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IntAnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for IntAnyContext<'input> {} + +impl<'input> 
IntAnyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::IntAnyContext( + BaseParserRuleContext::copy_from(ctx,IntAnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type InconsistentContext<'input> = BaseParserRuleContext<'input,InconsistentContextExt<'input>>; + +pub trait InconsistentContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Question + /// Returns `None` if there is no child corresponding to token Question + fn Question(&self) -> Option>> where Self:Sized{ + self.get_token(Question, 0) + } + /// Retrieves first TerminalNode corresponding to token Identifier + /// Returns `None` if there is no child corresponding to token Identifier + fn Identifier(&self) -> Option>> where Self:Sized{ + self.get_token(Identifier, 0) + } + fn nullability(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> InconsistentContextAttrs<'input> for InconsistentContext<'input>{} + +pub struct InconsistentContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{InconsistentContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for InconsistentContext<'input>{} + +impl<'input,'a> Listenable + 'a> for InconsistentContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_inconsistent(self); + } +} + +impl<'input> CustomRuleContext<'input> for InconsistentContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for InconsistentContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for InconsistentContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for InconsistentContext<'input> {} + +impl<'input> InconsistentContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::InconsistentContext( + BaseParserRuleContext::copy_from(ctx,InconsistentContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type DatatypeBindingOrConstantContext<'input> = BaseParserRuleContext<'input,DatatypeBindingOrConstantContextExt<'input>>; + +pub trait DatatypeBindingOrConstantContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn identifierPath(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } + fn nullability(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } + fn variation(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } + fn parameters(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> DatatypeBindingOrConstantContextAttrs<'input> for DatatypeBindingOrConstantContext<'input>{} + +pub struct DatatypeBindingOrConstantContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{DatatypeBindingOrConstantContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for DatatypeBindingOrConstantContext<'input>{} + +impl<'input,'a> Listenable + 'a> for DatatypeBindingOrConstantContext<'input>{ + fn 
enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_datatypeBindingOrConstant(self); + } +} + +impl<'input> CustomRuleContext<'input> for DatatypeBindingOrConstantContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for DatatypeBindingOrConstantContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for DatatypeBindingOrConstantContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for DatatypeBindingOrConstantContext<'input> {} + +impl<'input> DatatypeBindingOrConstantContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::DatatypeBindingOrConstantContext( + BaseParserRuleContext::copy_from(ctx,DatatypeBindingOrConstantContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type EnumSetContext<'input> = BaseParserRuleContext<'input,EnumSetContextExt<'input>>; + +pub trait EnumSetContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token OpenCurly + /// Returns `None` if there is no child corresponding to token OpenCurly + fn OpenCurly(&self) -> Option>> where Self:Sized{ + self.get_token(OpenCurly, 0) + } + /// Retrieves all `TerminalNode`s corresponding to token Identifier in current rule + fn Identifier_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + /// Retrieves 'i's TerminalNode corresponding to token Identifier, starting from 0. + /// Returns `None` if number of children corresponding to token Identifier is less or equal than `i`. + fn Identifier(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Identifier, i) + } + /// Retrieves first TerminalNode corresponding to token CloseCurly + /// Returns `None` if there is no child corresponding to token CloseCurly + fn CloseCurly(&self) -> Option>> where Self:Sized{ + self.get_token(CloseCurly, 0) + } + /// Retrieves all `TerminalNode`s corresponding to token Comma in current rule + fn Comma_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + /// Retrieves 'i's TerminalNode corresponding to token Comma, starting from 0. + /// Returns `None` if number of children corresponding to token Comma is less or equal than `i`. 
+ fn Comma(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Comma, i) + } +} + +impl<'input> EnumSetContextAttrs<'input> for EnumSetContext<'input>{} + +pub struct EnumSetContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{EnumSetContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for EnumSetContext<'input>{} + +impl<'input,'a> Listenable + 'a> for EnumSetContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_enumSet(self); + } +} + +impl<'input> CustomRuleContext<'input> for EnumSetContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for EnumSetContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for EnumSetContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for EnumSetContext<'input> {} + +impl<'input> EnumSetContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::EnumSetContext( + BaseParserRuleContext::copy_from(ctx,EnumSetContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type StrAnyContext<'input> = BaseParserRuleContext<'input,StrAnyContextExt<'input>>; + +pub trait StrAnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Metastr + /// Returns `None` if there is no child corresponding to token Metastr + fn Metastr(&self) -> Option>> where Self:Sized{ + self.get_token(Metastr, 0) + } +} + +impl<'input> StrAnyContextAttrs<'input> for StrAnyContext<'input>{} + +pub struct StrAnyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{StrAnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for StrAnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for StrAnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_strAny(self); + } +} + +impl<'input> CustomRuleContext<'input> for StrAnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for StrAnyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for StrAnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for StrAnyContext<'input> {} + +impl<'input> StrAnyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::StrAnyContext( + BaseParserRuleContext::copy_from(ctx,StrAnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type BoolTrueContext<'input> = BaseParserRuleContext<'input,BoolTrueContextExt<'input>>; + +pub trait BoolTrueContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode 
corresponding to token True + /// Returns `None` if there is no child corresponding to token True + fn True(&self) -> Option>> where Self:Sized{ + self.get_token(True, 0) + } +} + +impl<'input> BoolTrueContextAttrs<'input> for BoolTrueContext<'input>{} + +pub struct BoolTrueContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{BoolTrueContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for BoolTrueContext<'input>{} + +impl<'input,'a> Listenable + 'a> for BoolTrueContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_boolTrue(self); + } +} + +impl<'input> CustomRuleContext<'input> for BoolTrueContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for BoolTrueContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for BoolTrueContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for BoolTrueContext<'input> {} + +impl<'input> BoolTrueContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::BoolTrueContext( + BaseParserRuleContext::copy_from(ctx,BoolTrueContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type IntAtMostContext<'input> = BaseParserRuleContext<'input,IntAtMostContextExt<'input>>; + +pub trait IntAtMostContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Range + /// Returns `None` if there is no child corresponding to token Range + fn Range(&self) -> Option>> where Self:Sized{ + self.get_token(Range, 0) + } + fn integer(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> IntAtMostContextAttrs<'input> for IntAtMostContext<'input>{} + +pub struct IntAtMostContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IntAtMostContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IntAtMostContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IntAtMostContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_intAtMost(self); + } +} + +impl<'input> CustomRuleContext<'input> for IntAtMostContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for IntAtMostContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IntAtMostContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for IntAtMostContext<'input> {} + +impl<'input> IntAtMostContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::IntAtMostContext( + BaseParserRuleContext::copy_from(ctx,IntAtMostContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) 
+ } +} + +pub type IntAtLeastContext<'input> = BaseParserRuleContext<'input,IntAtLeastContextExt<'input>>; + +pub trait IntAtLeastContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn integer(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } + /// Retrieves first TerminalNode corresponding to token Range + /// Returns `None` if there is no child corresponding to token Range + fn Range(&self) -> Option>> where Self:Sized{ + self.get_token(Range, 0) + } +} + +impl<'input> IntAtLeastContextAttrs<'input> for IntAtLeastContext<'input>{} + +pub struct IntAtLeastContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IntAtLeastContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IntAtLeastContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IntAtLeastContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_intAtLeast(self); + } +} + +impl<'input> CustomRuleContext<'input> for IntAtLeastContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for IntAtLeastContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IntAtLeastContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for IntAtLeastContext<'input> {} + +impl<'input> IntAtLeastContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::IntAtLeastContext( + BaseParserRuleContext::copy_from(ctx,IntAtLeastContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type IntExactlyContext<'input> = BaseParserRuleContext<'input,IntExactlyContextExt<'input>>; + +pub trait IntExactlyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn integer(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> IntExactlyContextAttrs<'input> for IntExactlyContext<'input>{} + +pub struct IntExactlyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IntExactlyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IntExactlyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IntExactlyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_intExactly(self); + } +} + +impl<'input> CustomRuleContext<'input> for IntExactlyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for IntExactlyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IntExactlyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for IntExactlyContext<'input> {} + +impl<'input> IntExactlyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + 
PatternMiscContextAll::IntExactlyContext( + BaseParserRuleContext::copy_from(ctx,IntExactlyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type FunctionContext<'input> = BaseParserRuleContext<'input,FunctionContextExt<'input>>; + +pub trait FunctionContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Identifier + /// Returns `None` if there is no child corresponding to token Identifier + fn Identifier(&self) -> Option>> where Self:Sized{ + self.get_token(Identifier, 0) + } + /// Retrieves first TerminalNode corresponding to token OpenParen + /// Returns `None` if there is no child corresponding to token OpenParen + fn OpenParen(&self) -> Option>> where Self:Sized{ + self.get_token(OpenParen, 0) + } + /// Retrieves first TerminalNode corresponding to token CloseParen + /// Returns `None` if there is no child corresponding to token CloseParen + fn CloseParen(&self) -> Option>> where Self:Sized{ + self.get_token(CloseParen, 0) + } + fn pattern_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + fn pattern(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) + } + /// Retrieves all `TerminalNode`s corresponding to token Comma in current rule + fn Comma_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() + } + /// Retrieves 'i's TerminalNode corresponding to token Comma, starting from 0. + /// Returns `None` if number of children corresponding to token Comma is less or equal than `i`. + fn Comma(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Comma, i) + } +} + +impl<'input> FunctionContextAttrs<'input> for FunctionContext<'input>{} + +pub struct FunctionContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{FunctionContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for FunctionContext<'input>{} + +impl<'input,'a> Listenable + 'a> for FunctionContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_function(self); + } +} + +impl<'input> CustomRuleContext<'input> for FunctionContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for FunctionContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for FunctionContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for FunctionContext<'input> {} + +impl<'input> FunctionContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::FunctionContext( + BaseParserRuleContext::copy_from(ctx,FunctionContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type BoolAnyContext<'input> = BaseParserRuleContext<'input,BoolAnyContextExt<'input>>; + +pub trait BoolAnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Metabool + /// Returns `None` if there is no child corresponding to token Metabool + fn Metabool(&self) -> Option>> where Self:Sized{ + self.get_token(Metabool, 0) + } +} + +impl<'input> BoolAnyContextAttrs<'input> for 
BoolAnyContext<'input>{} + +pub struct BoolAnyContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{BoolAnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for BoolAnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for BoolAnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_boolAny(self); + } +} + +impl<'input> CustomRuleContext<'input> for BoolAnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for BoolAnyContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for BoolAnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for BoolAnyContext<'input> {} + +impl<'input> BoolAnyContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::BoolAnyContext( + BaseParserRuleContext::copy_from(ctx,BoolAnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type UnaryNotContext<'input> = BaseParserRuleContext<'input,UnaryNotContextExt<'input>>; + +pub trait UnaryNotContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Bang + /// Returns `None` if there is no child corresponding to token Bang + fn Bang(&self) -> Option>> where Self:Sized{ + self.get_token(Bang, 0) + } + fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> UnaryNotContextAttrs<'input> for UnaryNotContext<'input>{} + +pub struct UnaryNotContextExt<'input>{ + base:PatternMiscContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{UnaryNotContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for UnaryNotContext<'input>{} + +impl<'input,'a> Listenable + 'a> for UnaryNotContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_unaryNot(self); + } +} + +impl<'input> CustomRuleContext<'input> for UnaryNotContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_patternMisc } + //fn type_rule_index() -> usize where Self: Sized { RULE_patternMisc } +} + +impl<'input> Borrow> for UnaryNotContext<'input>{ + fn borrow(&self) -> &PatternMiscContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for UnaryNotContext<'input>{ + fn borrow_mut(&mut self) -> &mut PatternMiscContextExt<'input> { &mut self.base } +} + +impl<'input> PatternMiscContextAttrs<'input> for UnaryNotContext<'input> {} + +impl<'input> UnaryNotContextExt<'input>{ + fn new(ctx: &dyn PatternMiscContextAttrs<'input>) -> Rc> { + Rc::new( + PatternMiscContextAll::UnaryNotContext( + BaseParserRuleContext::copy_from(ctx,UnaryNotContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn patternMisc(&mut self,) + -> Result>,ANTLRError> { 
+ let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = PatternMiscContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 36, RULE_patternMisc); + let mut _localctx: Rc = _localctx; + let mut _la: isize = -1; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(287); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(28,&mut recog.base)? { + 1 =>{ + let tmp = ParenthesesContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(215); + recog.base.match_token(OpenParen,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(216); + recog.pattern()?; + + recog.base.set_state(217); + recog.base.match_token(CloseParen,&mut recog.err_handler)?; + + } + } + , + 2 =>{ + let tmp = IfThenElseContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(219); + recog.base.match_token(If,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(220); + recog.pattern()?; + + recog.base.set_state(221); + recog.base.match_token(Then,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(222); + recog.pattern()?; + + recog.base.set_state(223); + recog.base.match_token(Else,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(224); + recog.pattern()?; + + } + } + , + 3 =>{ + let tmp = UnaryNotContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 3); + _localctx = tmp; + { + recog.base.set_state(226); + recog.base.match_token(Bang,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(227); + recog.pattern()?; + + } + } + , + 4 =>{ + let tmp = AnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 4); + _localctx = tmp; + { + recog.base.set_state(228); + recog.base.match_token(Question,&mut recog.err_handler)?; + + } + } + , + 5 =>{ + let tmp = BoolAnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 5); + _localctx = tmp; + { + recog.base.set_state(229); + recog.base.match_token(Metabool,&mut recog.err_handler)?; + + } + } + , + 6 =>{ + let tmp = BoolTrueContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 6); + _localctx = tmp; + { + recog.base.set_state(230); + recog.base.match_token(True,&mut recog.err_handler)?; + + } + } + , + 7 =>{ + let tmp = BoolFalseContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 7); + _localctx = tmp; + { + recog.base.set_state(231); + recog.base.match_token(False,&mut recog.err_handler)?; + + } + } + , + 8 =>{ + let tmp = IntAnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 8); + _localctx = tmp; + { + recog.base.set_state(232); + recog.base.match_token(Metaint,&mut recog.err_handler)?; + + } + } + , + 9 =>{ + let tmp = IntRangeContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 9); + _localctx = tmp; + { + /*InvokeRule integer*/ + recog.base.set_state(233); + recog.integer()?; + + recog.base.set_state(234); + recog.base.match_token(Range,&mut recog.err_handler)?; + + /*InvokeRule integer*/ + recog.base.set_state(235); + recog.integer()?; + + } + } + , + 10 =>{ + let tmp = IntAtLeastContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 10); + _localctx = tmp; + { + /*InvokeRule integer*/ + 
recog.base.set_state(237); + recog.integer()?; + + recog.base.set_state(238); + recog.base.match_token(Range,&mut recog.err_handler)?; + + } + } + , + 11 =>{ + let tmp = IntAtMostContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 11); + _localctx = tmp; + { + recog.base.set_state(240); + recog.base.match_token(Range,&mut recog.err_handler)?; + + /*InvokeRule integer*/ + recog.base.set_state(241); + recog.integer()?; + + } + } + , + 12 =>{ + let tmp = IntExactlyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 12); + _localctx = tmp; + { + /*InvokeRule integer*/ + recog.base.set_state(242); + recog.integer()?; + + } + } + , + 13 =>{ + let tmp = EnumAnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 13); + _localctx = tmp; + { + recog.base.set_state(243); + recog.base.match_token(Metaenum,&mut recog.err_handler)?; + + } + } + , + 14 =>{ + let tmp = EnumSetContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 14); + _localctx = tmp; + { + recog.base.set_state(244); + recog.base.match_token(OpenCurly,&mut recog.err_handler)?; + + recog.base.set_state(245); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + recog.base.set_state(250); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Comma { + { + { + recog.base.set_state(246); + recog.base.match_token(Comma,&mut recog.err_handler)?; + + recog.base.set_state(247); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + } + } + recog.base.set_state(252); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + recog.base.set_state(253); + recog.base.match_token(CloseCurly,&mut recog.err_handler)?; + + } + } + , + 15 =>{ + let tmp = StrAnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 15); + _localctx = tmp; + { + recog.base.set_state(254); + recog.base.match_token(Metastr,&mut recog.err_handler)?; + + } + } + , + 16 =>{ + let tmp = StrExactlyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 16); + _localctx = tmp; + { + recog.base.set_state(255); + recog.base.match_token(String,&mut recog.err_handler)?; + + } + } + , + 17 =>{ + let tmp = DtAnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 17); + _localctx = tmp; + { + recog.base.set_state(256); + recog.base.match_token(Typename,&mut recog.err_handler)?; + + } + } + , + 18 =>{ + let tmp = FunctionContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 18); + _localctx = tmp; + { + recog.base.set_state(257); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + recog.base.set_state(258); + recog.base.match_token(OpenParen,&mut recog.err_handler)?; + + recog.base.set_state(267); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + if (((_la) & !0x3f) == 0 && ((1usize << _la) & ((1usize << If) | (1usize << True) | (1usize << False) | (1usize << Metabool) | (1usize << Metaint) | (1usize << Metaenum) | (1usize << Metastr) | (1usize << Typename) | (1usize << Question) | (1usize << Bang) | (1usize << OpenParen) | (1usize << OpenCurly))) != 0) || ((((_la - 40)) & !0x3f) == 0 && ((1usize << (_la - 40)) & ((1usize << (Plus - 40)) | (1usize << (Minus - 40)) | (1usize << (Range - 40)) | (1usize << (Nonzero - 40)) | (1usize << (Zero - 40)) | (1usize << (String - 40)) | (1usize << (Identifier - 40)))) != 0) { + { + /*InvokeRule pattern*/ + 
recog.base.set_state(259); + recog.pattern()?; + + recog.base.set_state(264); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Comma { + { + { + recog.base.set_state(260); + recog.base.match_token(Comma,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(261); + recog.pattern()?; + + } + } + recog.base.set_state(266); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + } + } + + recog.base.set_state(269); + recog.base.match_token(CloseParen,&mut recog.err_handler)?; + + } + } + , + 19 =>{ + let tmp = DatatypeBindingOrConstantContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 19); + _localctx = tmp; + { + /*InvokeRule identifierPath*/ + recog.base.set_state(270); + recog.identifierPath()?; + + recog.base.set_state(272); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + if _la==Question || _la==Bang { + { + /*InvokeRule nullability*/ + recog.base.set_state(271); + recog.nullability()?; + + } + } + + recog.base.set_state(275); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(25,&mut recog.base)? { + x if x == 1=>{ + { + /*InvokeRule variation*/ + recog.base.set_state(274); + recog.variation()?; + + } + } + + _ => {} + } + recog.base.set_state(278); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(26,&mut recog.base)? { + x if x == 1=>{ + { + /*InvokeRule parameters*/ + recog.base.set_state(277); + recog.parameters()?; + + } + } + + _ => {} + } + } + } + , + 20 =>{ + let tmp = InconsistentContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 20); + _localctx = tmp; + { + recog.base.set_state(280); + recog.base.match_token(Question,&mut recog.err_handler)?; + + recog.base.set_state(281); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + recog.base.set_state(283); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + if _la==Question || _la==Bang { + { + /*InvokeRule nullability*/ + recog.base.set_state(282); + recog.nullability()?; + + } + } + + } + } + , + 21 =>{ + let tmp = UnaryNegateContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 21); + _localctx = tmp; + { + recog.base.set_state(285); + recog.base.match_token(Minus,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(286); + recog.pattern()?; + + } + } + + _ => {} + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- nullability ---------------- +#[derive(Debug)] +pub enum NullabilityContextAll<'input>{ + NullableContext(NullableContext<'input>), + NonNullableContext(NonNullableContext<'input>), + NullableIfContext(NullableIfContext<'input>), +Error(NullabilityContext<'input>) +} +antlr_rust::tid!{NullabilityContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for NullabilityContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for NullabilityContextAll<'input>{} + +impl<'input> Deref for NullabilityContextAll<'input>{ + type Target = dyn NullabilityContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use NullabilityContextAll::*; + match self{ + 
NullableContext(inner) => inner, + NonNullableContext(inner) => inner, + NullableIfContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for NullabilityContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type NullabilityContext<'input> = BaseParserRuleContext<'input,NullabilityContextExt<'input>>; + +#[derive(Clone)] +pub struct NullabilityContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for NullabilityContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NullabilityContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for NullabilityContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_nullability } + //fn type_rule_index() -> usize where Self: Sized { RULE_nullability } +} +antlr_rust::tid!{NullabilityContextExt<'a>} + +impl<'input> NullabilityContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + NullabilityContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,NullabilityContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait NullabilityContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> NullabilityContextAttrs<'input> for NullabilityContext<'input>{} + +pub type NullableContext<'input> = BaseParserRuleContext<'input,NullableContextExt<'input>>; + +pub trait NullableContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Question + /// Returns `None` if there is no child corresponding to token Question + fn Question(&self) -> Option>> where Self:Sized{ + self.get_token(Question, 0) + } +} + +impl<'input> NullableContextAttrs<'input> for NullableContext<'input>{} + +pub struct NullableContextExt<'input>{ + base:NullabilityContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{NullableContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for NullableContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NullableContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_nullable(self); + } +} + +impl<'input> CustomRuleContext<'input> for NullableContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_nullability } + //fn type_rule_index() -> usize where Self: Sized { RULE_nullability } +} + +impl<'input> Borrow> for NullableContext<'input>{ + fn borrow(&self) -> &NullabilityContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for NullableContext<'input>{ + fn borrow_mut(&mut self) -> &mut NullabilityContextExt<'input> { &mut self.base } +} + +impl<'input> NullabilityContextAttrs<'input> for NullableContext<'input> {} + +impl<'input> NullableContextExt<'input>{ + fn new(ctx: &dyn NullabilityContextAttrs<'input>) -> Rc> { + Rc::new( + NullabilityContextAll::NullableContext( + BaseParserRuleContext::copy_from(ctx,NullableContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type NonNullableContext<'input> = BaseParserRuleContext<'input,NonNullableContextExt<'input>>; + +pub 
trait NonNullableContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Bang + /// Returns `None` if there is no child corresponding to token Bang + fn Bang(&self) -> Option>> where Self:Sized{ + self.get_token(Bang, 0) + } +} + +impl<'input> NonNullableContextAttrs<'input> for NonNullableContext<'input>{} + +pub struct NonNullableContextExt<'input>{ + base:NullabilityContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{NonNullableContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for NonNullableContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NonNullableContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_nonNullable(self); + } +} + +impl<'input> CustomRuleContext<'input> for NonNullableContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_nullability } + //fn type_rule_index() -> usize where Self: Sized { RULE_nullability } +} + +impl<'input> Borrow> for NonNullableContext<'input>{ + fn borrow(&self) -> &NullabilityContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for NonNullableContext<'input>{ + fn borrow_mut(&mut self) -> &mut NullabilityContextExt<'input> { &mut self.base } +} + +impl<'input> NullabilityContextAttrs<'input> for NonNullableContext<'input> {} + +impl<'input> NonNullableContextExt<'input>{ + fn new(ctx: &dyn NullabilityContextAttrs<'input>) -> Rc> { + Rc::new( + NullabilityContextAll::NonNullableContext( + BaseParserRuleContext::copy_from(ctx,NonNullableContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type NullableIfContext<'input> = BaseParserRuleContext<'input,NullableIfContextExt<'input>>; + +pub trait NullableIfContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Question + /// Returns `None` if there is no child corresponding to token Question + fn Question(&self) -> Option>> where Self:Sized{ + self.get_token(Question, 0) + } + fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> NullableIfContextAttrs<'input> for NullableIfContext<'input>{} + +pub struct NullableIfContextExt<'input>{ + base:NullabilityContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{NullableIfContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for NullableIfContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NullableIfContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_nullableIf(self); + } +} + +impl<'input> CustomRuleContext<'input> for NullableIfContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_nullability } + //fn type_rule_index() -> usize where Self: Sized { RULE_nullability } +} + +impl<'input> Borrow> for NullableIfContext<'input>{ + fn borrow(&self) -> &NullabilityContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for NullableIfContext<'input>{ + fn borrow_mut(&mut self) -> &mut NullabilityContextExt<'input> { &mut self.base } +} + +impl<'input> NullabilityContextAttrs<'input> for NullableIfContext<'input> {} + +impl<'input> NullableIfContextExt<'input>{ + fn new(ctx: &dyn NullabilityContextAttrs<'input>) -> 
Rc> { + Rc::new( + NullabilityContextAll::NullableIfContext( + BaseParserRuleContext::copy_from(ctx,NullableIfContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn nullability(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = NullabilityContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 38, RULE_nullability); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(293); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(29,&mut recog.base)? { + 1 =>{ + let tmp = NonNullableContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(289); + recog.base.match_token(Bang,&mut recog.err_handler)?; + + } + } + , + 2 =>{ + let tmp = NullableContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(290); + recog.base.match_token(Question,&mut recog.err_handler)?; + + } + } + , + 3 =>{ + let tmp = NullableIfContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 3); + _localctx = tmp; + { + recog.base.set_state(291); + recog.base.match_token(Question,&mut recog.err_handler)?; + + /*InvokeRule pattern*/ + recog.base.set_state(292); + recog.pattern()?; + + } + } + + _ => {} + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- variation ---------------- +pub type VariationContextAll<'input> = VariationContext<'input>; + + +pub type VariationContext<'input> = BaseParserRuleContext<'input,VariationContextExt<'input>>; + +#[derive(Clone)] +pub struct VariationContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for VariationContext<'input>{} + +impl<'input,'a> Listenable + 'a> for VariationContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_variation(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_variation(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for VariationContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_variation } + //fn type_rule_index() -> usize where Self: Sized { RULE_variation } +} +antlr_rust::tid!{VariationContextExt<'a>} + +impl<'input> VariationContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,VariationContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait VariationContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +/// Retrieves first TerminalNode corresponding to token OpenSquare +/// Returns `None` if there is no child corresponding to 
token OpenSquare +fn OpenSquare(&self) -> Option>> where Self:Sized{ + self.get_token(OpenSquare, 0) +} +fn variationBody(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} +/// Retrieves first TerminalNode corresponding to token CloseSquare +/// Returns `None` if there is no child corresponding to token CloseSquare +fn CloseSquare(&self) -> Option>> where Self:Sized{ + self.get_token(CloseSquare, 0) +} + +} + +impl<'input> VariationContextAttrs<'input> for VariationContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn variation(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = VariationContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 40, RULE_variation); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(295); + recog.base.match_token(OpenSquare,&mut recog.err_handler)?; + + /*InvokeRule variationBody*/ + recog.base.set_state(296); + recog.variationBody()?; + + recog.base.set_state(297); + recog.base.match_token(CloseSquare,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- variationBody ---------------- +#[derive(Debug)] +pub enum VariationBodyContextAll<'input>{ + VarAnyContext(VarAnyContext<'input>), + VarSystemPreferredContext(VarSystemPreferredContext<'input>), + VarUserDefinedContext(VarUserDefinedContext<'input>), +Error(VariationBodyContext<'input>) +} +antlr_rust::tid!{VariationBodyContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for VariationBodyContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for VariationBodyContextAll<'input>{} + +impl<'input> Deref for VariationBodyContextAll<'input>{ + type Target = dyn VariationBodyContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use VariationBodyContextAll::*; + match self{ + VarAnyContext(inner) => inner, + VarSystemPreferredContext(inner) => inner, + VarUserDefinedContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for VariationBodyContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type VariationBodyContext<'input> = BaseParserRuleContext<'input,VariationBodyContextExt<'input>>; + +#[derive(Clone)] +pub struct VariationBodyContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for VariationBodyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for VariationBodyContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for VariationBodyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_variationBody } + //fn 
type_rule_index() -> usize where Self: Sized { RULE_variationBody } +} +antlr_rust::tid!{VariationBodyContextExt<'a>} + +impl<'input> VariationBodyContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + VariationBodyContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,VariationBodyContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait VariationBodyContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> VariationBodyContextAttrs<'input> for VariationBodyContext<'input>{} + +pub type VarAnyContext<'input> = BaseParserRuleContext<'input,VarAnyContextExt<'input>>; + +pub trait VarAnyContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Question + /// Returns `None` if there is no child corresponding to token Question + fn Question(&self) -> Option>> where Self:Sized{ + self.get_token(Question, 0) + } +} + +impl<'input> VarAnyContextAttrs<'input> for VarAnyContext<'input>{} + +pub struct VarAnyContextExt<'input>{ + base:VariationBodyContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{VarAnyContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for VarAnyContext<'input>{} + +impl<'input,'a> Listenable + 'a> for VarAnyContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_varAny(self); + } +} + +impl<'input> CustomRuleContext<'input> for VarAnyContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_variationBody } + //fn type_rule_index() -> usize where Self: Sized { RULE_variationBody } +} + +impl<'input> Borrow> for VarAnyContext<'input>{ + fn borrow(&self) -> &VariationBodyContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for VarAnyContext<'input>{ + fn borrow_mut(&mut self) -> &mut VariationBodyContextExt<'input> { &mut self.base } +} + +impl<'input> VariationBodyContextAttrs<'input> for VarAnyContext<'input> {} + +impl<'input> VarAnyContextExt<'input>{ + fn new(ctx: &dyn VariationBodyContextAttrs<'input>) -> Rc> { + Rc::new( + VariationBodyContextAll::VarAnyContext( + BaseParserRuleContext::copy_from(ctx,VarAnyContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type VarSystemPreferredContext<'input> = BaseParserRuleContext<'input,VarSystemPreferredContextExt<'input>>; + +pub trait VarSystemPreferredContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Zero + /// Returns `None` if there is no child corresponding to token Zero + fn Zero(&self) -> Option>> where Self:Sized{ + self.get_token(Zero, 0) + } +} + +impl<'input> VarSystemPreferredContextAttrs<'input> for VarSystemPreferredContext<'input>{} + +pub struct VarSystemPreferredContextExt<'input>{ + base:VariationBodyContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{VarSystemPreferredContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for VarSystemPreferredContext<'input>{} + +impl<'input,'a> Listenable + 'a> for VarSystemPreferredContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_varSystemPreferred(self); + } +} + +impl<'input> CustomRuleContext<'input> for VarSystemPreferredContextExt<'input>{ + type TF = 
LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_variationBody } + //fn type_rule_index() -> usize where Self: Sized { RULE_variationBody } +} + +impl<'input> Borrow> for VarSystemPreferredContext<'input>{ + fn borrow(&self) -> &VariationBodyContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for VarSystemPreferredContext<'input>{ + fn borrow_mut(&mut self) -> &mut VariationBodyContextExt<'input> { &mut self.base } +} + +impl<'input> VariationBodyContextAttrs<'input> for VarSystemPreferredContext<'input> {} + +impl<'input> VarSystemPreferredContextExt<'input>{ + fn new(ctx: &dyn VariationBodyContextAttrs<'input>) -> Rc> { + Rc::new( + VariationBodyContextAll::VarSystemPreferredContext( + BaseParserRuleContext::copy_from(ctx,VarSystemPreferredContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type VarUserDefinedContext<'input> = BaseParserRuleContext<'input,VarUserDefinedContextExt<'input>>; + +pub trait VarUserDefinedContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn identifierPath(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> VarUserDefinedContextAttrs<'input> for VarUserDefinedContext<'input>{} + +pub struct VarUserDefinedContextExt<'input>{ + base:VariationBodyContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{VarUserDefinedContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for VarUserDefinedContext<'input>{} + +impl<'input,'a> Listenable + 'a> for VarUserDefinedContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_varUserDefined(self); + } +} + +impl<'input> CustomRuleContext<'input> for VarUserDefinedContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_variationBody } + //fn type_rule_index() -> usize where Self: Sized { RULE_variationBody } +} + +impl<'input> Borrow> for VarUserDefinedContext<'input>{ + fn borrow(&self) -> &VariationBodyContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for VarUserDefinedContext<'input>{ + fn borrow_mut(&mut self) -> &mut VariationBodyContextExt<'input> { &mut self.base } +} + +impl<'input> VariationBodyContextAttrs<'input> for VarUserDefinedContext<'input> {} + +impl<'input> VarUserDefinedContextExt<'input>{ + fn new(ctx: &dyn VariationBodyContextAttrs<'input>) -> Rc> { + Rc::new( + VariationBodyContextAll::VarUserDefinedContext( + BaseParserRuleContext::copy_from(ctx,VarUserDefinedContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn variationBody(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = VariationBodyContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 42, RULE_variationBody); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(302); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + Question + => { + let tmp = VarAnyContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + 
recog.base.set_state(299); + recog.base.match_token(Question,&mut recog.err_handler)?; + + } + } + + Zero + => { + let tmp = VarSystemPreferredContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(300); + recog.base.match_token(Zero,&mut recog.err_handler)?; + + } + } + + Identifier + => { + let tmp = VarUserDefinedContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 3); + _localctx = tmp; + { + /*InvokeRule identifierPath*/ + recog.base.set_state(301); + recog.identifierPath()?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- parameters ---------------- +pub type ParametersContextAll<'input> = ParametersContext<'input>; + + +pub type ParametersContext<'input> = BaseParserRuleContext<'input,ParametersContextExt<'input>>; + +#[derive(Clone)] +pub struct ParametersContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for ParametersContext<'input>{} + +impl<'input,'a> Listenable + 'a> for ParametersContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_parameters(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_parameters(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for ParametersContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_parameters } + //fn type_rule_index() -> usize where Self: Sized { RULE_parameters } +} +antlr_rust::tid!{ParametersContextExt<'a>} + +impl<'input> ParametersContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,ParametersContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait ParametersContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +/// Retrieves first TerminalNode corresponding to token LessThan +/// Returns `None` if there is no child corresponding to token LessThan +fn LessThan(&self) -> Option>> where Self:Sized{ + self.get_token(LessThan, 0) +} +/// Retrieves first TerminalNode corresponding to token GreaterThan +/// Returns `None` if there is no child corresponding to token GreaterThan +fn GreaterThan(&self) -> Option>> where Self:Sized{ + self.get_token(GreaterThan, 0) +} +fn parameter_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +fn parameter(&self, i: usize) -> Option>> where Self:Sized{ + self.child_of_type(i) +} +/// Retrieves all `TerminalNode`s corresponding to token Comma in current rule +fn Comma_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Comma, starting from 0. +/// Returns `None` if number of children corresponding to token Comma is less or equal than `i`. 
+fn Comma(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Comma, i) +} + +} + +impl<'input> ParametersContextAttrs<'input> for ParametersContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn parameters(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = ParametersContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 44, RULE_parameters); + let mut _localctx: Rc = _localctx; + let mut _la: isize = -1; + let result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(304); + recog.base.match_token(LessThan,&mut recog.err_handler)?; + + recog.base.set_state(313); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + if (((_la) & !0x3f) == 0 && ((1usize << _la) & ((1usize << If) | (1usize << Null) | (1usize << True) | (1usize << False) | (1usize << Metabool) | (1usize << Metaint) | (1usize << Metaenum) | (1usize << Metastr) | (1usize << Typename) | (1usize << Question) | (1usize << Bang) | (1usize << OpenParen) | (1usize << OpenCurly))) != 0) || ((((_la - 40)) & !0x3f) == 0 && ((1usize << (_la - 40)) & ((1usize << (Plus - 40)) | (1usize << (Minus - 40)) | (1usize << (Range - 40)) | (1usize << (Nonzero - 40)) | (1usize << (Zero - 40)) | (1usize << (String - 40)) | (1usize << (Identifier - 40)))) != 0) { + { + /*InvokeRule parameter*/ + recog.base.set_state(305); + recog.parameter()?; + + recog.base.set_state(310); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + while _la==Comma { + { + { + recog.base.set_state(306); + recog.base.match_token(Comma,&mut recog.err_handler)?; + + /*InvokeRule parameter*/ + recog.base.set_state(307); + recog.parameter()?; + + } + } + recog.base.set_state(312); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + } + } + } + + recog.base.set_state(315); + recog.base.match_token(GreaterThan,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- parameter ---------------- +pub type ParameterContextAll<'input> = ParameterContext<'input>; + + +pub type ParameterContext<'input> = BaseParserRuleContext<'input,ParameterContextExt<'input>>; + +#[derive(Clone)] +pub struct ParameterContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for ParameterContext<'input>{} + +impl<'input,'a> Listenable + 'a> for ParameterContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_parameter(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_parameter(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for ParameterContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_parameter } + //fn 
type_rule_index() -> usize where Self: Sized { RULE_parameter } +} +antlr_rust::tid!{ParameterContextExt<'a>} + +impl<'input> ParameterContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,ParameterContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait ParameterContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +fn parameterValue(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} +fn identifierOrString(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) +} +/// Retrieves first TerminalNode corresponding to token Colon +/// Returns `None` if there is no child corresponding to token Colon +fn Colon(&self) -> Option>> where Self:Sized{ + self.get_token(Colon, 0) +} + +} + +impl<'input> ParameterContextAttrs<'input> for ParameterContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn parameter(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = ParameterContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 46, RULE_parameter); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(320); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(33,&mut recog.base)? { + x if x == 1=>{ + { + /*InvokeRule identifierOrString*/ + recog.base.set_state(317); + recog.identifierOrString()?; + + recog.base.set_state(318); + recog.base.match_token(Colon,&mut recog.err_handler)?; + + } + } + + _ => {} + } + /*InvokeRule parameterValue*/ + recog.base.set_state(322); + recog.parameterValue()?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- parameterValue ---------------- +#[derive(Debug)] +pub enum ParameterValueContextAll<'input>{ + SpecifiedContext(SpecifiedContext<'input>), + NullContext(NullContext<'input>), +Error(ParameterValueContext<'input>) +} +antlr_rust::tid!{ParameterValueContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for ParameterValueContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for ParameterValueContextAll<'input>{} + +impl<'input> Deref for ParameterValueContextAll<'input>{ + type Target = dyn ParameterValueContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use ParameterValueContextAll::*; + match self{ + SpecifiedContext(inner) => inner, + NullContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for ParameterValueContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type ParameterValueContext<'input> = BaseParserRuleContext<'input,ParameterValueContextExt<'input>>; + +#[derive(Clone)] 
+pub struct ParameterValueContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for ParameterValueContext<'input>{} + +impl<'input,'a> Listenable + 'a> for ParameterValueContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for ParameterValueContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_parameterValue } + //fn type_rule_index() -> usize where Self: Sized { RULE_parameterValue } +} +antlr_rust::tid!{ParameterValueContextExt<'a>} + +impl<'input> ParameterValueContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + ParameterValueContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,ParameterValueContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait ParameterValueContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> ParameterValueContextAttrs<'input> for ParameterValueContext<'input>{} + +pub type SpecifiedContext<'input> = BaseParserRuleContext<'input,SpecifiedContextExt<'input>>; + +pub trait SpecifiedContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + fn pattern(&self) -> Option>> where Self:Sized{ + self.child_of_type(0) + } +} + +impl<'input> SpecifiedContextAttrs<'input> for SpecifiedContext<'input>{} + +pub struct SpecifiedContextExt<'input>{ + base:ParameterValueContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{SpecifiedContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for SpecifiedContext<'input>{} + +impl<'input,'a> Listenable + 'a> for SpecifiedContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Specified(self); + } +} + +impl<'input> CustomRuleContext<'input> for SpecifiedContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_parameterValue } + //fn type_rule_index() -> usize where Self: Sized { RULE_parameterValue } +} + +impl<'input> Borrow> for SpecifiedContext<'input>{ + fn borrow(&self) -> &ParameterValueContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for SpecifiedContext<'input>{ + fn borrow_mut(&mut self) -> &mut ParameterValueContextExt<'input> { &mut self.base } +} + +impl<'input> ParameterValueContextAttrs<'input> for SpecifiedContext<'input> {} + +impl<'input> SpecifiedContextExt<'input>{ + fn new(ctx: &dyn ParameterValueContextAttrs<'input>) -> Rc> { + Rc::new( + ParameterValueContextAll::SpecifiedContext( + BaseParserRuleContext::copy_from(ctx,SpecifiedContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type NullContext<'input> = BaseParserRuleContext<'input,NullContextExt<'input>>; + +pub trait NullContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Null + /// Returns `None` if there is no child corresponding to token Null + fn Null(&self) -> Option>> where Self:Sized{ + self.get_token(Null, 0) + } +} + +impl<'input> NullContextAttrs<'input> for NullContext<'input>{} + +pub struct NullContextExt<'input>{ + base:ParameterValueContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{NullContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for NullContext<'input>{} + +impl<'input,'a> Listenable + 'a> for NullContext<'input>{ + 
fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Null(self); + } +} + +impl<'input> CustomRuleContext<'input> for NullContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_parameterValue } + //fn type_rule_index() -> usize where Self: Sized { RULE_parameterValue } +} + +impl<'input> Borrow> for NullContext<'input>{ + fn borrow(&self) -> &ParameterValueContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for NullContext<'input>{ + fn borrow_mut(&mut self) -> &mut ParameterValueContextExt<'input> { &mut self.base } +} + +impl<'input> ParameterValueContextAttrs<'input> for NullContext<'input> {} + +impl<'input> NullContextExt<'input>{ + fn new(ctx: &dyn ParameterValueContextAttrs<'input>) -> Rc> { + Rc::new( + ParameterValueContextAll::NullContext( + BaseParserRuleContext::copy_from(ctx,NullContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn parameterValue(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = ParameterValueContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 48, RULE_parameterValue); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(326); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + Null + => { + let tmp = NullContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(324); + recog.base.match_token(Null,&mut recog.err_handler)?; + + } + } + + If | True | False | Metabool | Metaint | Metaenum | Metastr | Typename | + Question | Bang | OpenParen | OpenCurly | Plus | Minus | Range | Nonzero | + Zero | String | Identifier + => { + let tmp = SpecifiedContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + /*InvokeRule pattern*/ + recog.base.set_state(325); + recog.pattern()?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? 
+ } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- integer ---------------- +pub type IntegerContextAll<'input> = IntegerContext<'input>; + + +pub type IntegerContext<'input> = BaseParserRuleContext<'input,IntegerContextExt<'input>>; + +#[derive(Clone)] +pub struct IntegerContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for IntegerContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IntegerContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_integer(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_integer(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for IntegerContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_integer } + //fn type_rule_index() -> usize where Self: Sized { RULE_integer } +} +antlr_rust::tid!{IntegerContextExt<'a>} + +impl<'input> IntegerContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,IntegerContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait IntegerContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +/// Retrieves first TerminalNode corresponding to token Zero +/// Returns `None` if there is no child corresponding to token Zero +fn Zero(&self) -> Option>> where Self:Sized{ + self.get_token(Zero, 0) +} +/// Retrieves first TerminalNode corresponding to token Nonzero +/// Returns `None` if there is no child corresponding to token Nonzero +fn Nonzero(&self) -> Option>> where Self:Sized{ + self.get_token(Nonzero, 0) +} +/// Retrieves first TerminalNode corresponding to token Plus +/// Returns `None` if there is no child corresponding to token Plus +fn Plus(&self) -> Option>> where Self:Sized{ + self.get_token(Plus, 0) +} +/// Retrieves first TerminalNode corresponding to token Minus +/// Returns `None` if there is no child corresponding to token Minus +fn Minus(&self) -> Option>> where Self:Sized{ + self.get_token(Minus, 0) +} + +} + +impl<'input> IntegerContextAttrs<'input> for IntegerContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn integer(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = IntegerContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 50, RULE_integer); + let mut _localctx: Rc = _localctx; + let mut _la: isize = -1; + let result: Result<(), ANTLRError> = (|| { + + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(329); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + if _la==Plus || _la==Minus { + { + recog.base.set_state(328); + _la = recog.base.input.la(1); + if { !(_la==Plus || _la==Minus) } { + 
recog.err_handler.recover_inline(&mut recog.base)?; + + } + else { + if recog.base.input.la(1)==TOKEN_EOF { recog.base.matched_eof = true }; + recog.err_handler.report_match(&mut recog.base); + recog.base.consume(&mut recog.err_handler); + } + } + } + + recog.base.set_state(331); + _la = recog.base.input.la(1); + if { !(_la==Nonzero || _la==Zero) } { + recog.err_handler.recover_inline(&mut recog.base)?; + + } + else { + if recog.base.input.la(1)==TOKEN_EOF { recog.base.matched_eof = true }; + recog.err_handler.report_match(&mut recog.base); + recog.base.consume(&mut recog.err_handler); + } + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- identifierPath ---------------- +pub type IdentifierPathContextAll<'input> = IdentifierPathContext<'input>; + + +pub type IdentifierPathContext<'input> = BaseParserRuleContext<'input,IdentifierPathContextExt<'input>>; + +#[derive(Clone)] +pub struct IdentifierPathContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for IdentifierPathContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IdentifierPathContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_identifierPath(self); + }fn exit(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.exit_identifierPath(self); + listener.exit_every_rule(self); + } +} + +impl<'input> CustomRuleContext<'input> for IdentifierPathContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_identifierPath } + //fn type_rule_index() -> usize where Self: Sized { RULE_identifierPath } +} +antlr_rust::tid!{IdentifierPathContextExt<'a>} + +impl<'input> IdentifierPathContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,IdentifierPathContextExt{ + ph:PhantomData + }), + ) + } +} + +pub trait IdentifierPathContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + +/// Retrieves all `TerminalNode`s corresponding to token Identifier in current rule +fn Identifier_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Identifier, starting from 0. +/// Returns `None` if number of children corresponding to token Identifier is less or equal than `i`. +fn Identifier(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Identifier, i) +} +/// Retrieves all `TerminalNode`s corresponding to token Period in current rule +fn Period_all(&self) -> Vec>> where Self:Sized{ + self.children_of_type() +} +/// Retrieves 'i's TerminalNode corresponding to token Period, starting from 0. +/// Returns `None` if number of children corresponding to token Period is less or equal than `i`. 
+fn Period(&self, i: usize) -> Option>> where Self:Sized{ + self.get_token(Period, i) +} + +} + +impl<'input> IdentifierPathContextAttrs<'input> for IdentifierPathContext<'input>{} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn identifierPath(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = IdentifierPathContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 52, RULE_identifierPath); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(337); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(36,&mut recog.base)?; + while { _alt!=2 && _alt!=INVALID_ALT } { + if _alt==1 { + { + { + recog.base.set_state(333); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + recog.base.set_state(334); + recog.base.match_token(Period,&mut recog.err_handler)?; + + } + } + } + recog.base.set_state(339); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(36,&mut recog.base)?; + } + recog.base.set_state(340); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} +//------------------- identifierOrString ---------------- +#[derive(Debug)] +pub enum IdentifierOrStringContextAll<'input>{ + StrContext(StrContext<'input>), + IdentContext(IdentContext<'input>), +Error(IdentifierOrStringContext<'input>) +} +antlr_rust::tid!{IdentifierOrStringContextAll<'a>} + +impl<'input> antlr_rust::parser_rule_context::DerefSeal for IdentifierOrStringContextAll<'input>{} + +impl<'input> SubstraitTypeParserContext<'input> for IdentifierOrStringContextAll<'input>{} + +impl<'input> Deref for IdentifierOrStringContextAll<'input>{ + type Target = dyn IdentifierOrStringContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target{ + use IdentifierOrStringContextAll::*; + match self{ + StrContext(inner) => inner, + IdentContext(inner) => inner, +Error(inner) => inner + } + } +} +impl<'input,'a> Listenable + 'a> for IdentifierOrStringContextAll<'input>{ + fn enter(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().enter(listener) } + fn exit(&self, listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { self.deref().exit(listener) } +} + + + +pub type IdentifierOrStringContext<'input> = BaseParserRuleContext<'input,IdentifierOrStringContextExt<'input>>; + +#[derive(Clone)] +pub struct IdentifierOrStringContextExt<'input>{ +ph:PhantomData<&'input str> +} + +impl<'input> SubstraitTypeParserContext<'input> for IdentifierOrStringContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IdentifierOrStringContext<'input>{ +} + +impl<'input> CustomRuleContext<'input> for IdentifierOrStringContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_identifierOrString } + 
//fn type_rule_index() -> usize where Self: Sized { RULE_identifierOrString } +} +antlr_rust::tid!{IdentifierOrStringContextExt<'a>} + +impl<'input> IdentifierOrStringContextExt<'input>{ + fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { + Rc::new( + IdentifierOrStringContextAll::Error( + BaseParserRuleContext::new_parser_ctx(parent, invoking_state,IdentifierOrStringContextExt{ + ph:PhantomData + }), + ) + ) + } +} + +pub trait IdentifierOrStringContextAttrs<'input>: SubstraitTypeParserContext<'input> + BorrowMut>{ + + +} + +impl<'input> IdentifierOrStringContextAttrs<'input> for IdentifierOrStringContext<'input>{} + +pub type StrContext<'input> = BaseParserRuleContext<'input,StrContextExt<'input>>; + +pub trait StrContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token String + /// Returns `None` if there is no child corresponding to token String + fn String(&self) -> Option>> where Self:Sized{ + self.get_token(String, 0) + } +} + +impl<'input> StrContextAttrs<'input> for StrContext<'input>{} + +pub struct StrContextExt<'input>{ + base:IdentifierOrStringContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{StrContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for StrContext<'input>{} + +impl<'input,'a> Listenable + 'a> for StrContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Str(self); + } +} + +impl<'input> CustomRuleContext<'input> for StrContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn get_rule_index(&self) -> usize { RULE_identifierOrString } + //fn type_rule_index() -> usize where Self: Sized { RULE_identifierOrString } +} + +impl<'input> Borrow> for StrContext<'input>{ + fn borrow(&self) -> &IdentifierOrStringContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for StrContext<'input>{ + fn borrow_mut(&mut self) -> &mut IdentifierOrStringContextExt<'input> { &mut self.base } +} + +impl<'input> IdentifierOrStringContextAttrs<'input> for StrContext<'input> {} + +impl<'input> StrContextExt<'input>{ + fn new(ctx: &dyn IdentifierOrStringContextAttrs<'input>) -> Rc> { + Rc::new( + IdentifierOrStringContextAll::StrContext( + BaseParserRuleContext::copy_from(ctx,StrContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +pub type IdentContext<'input> = BaseParserRuleContext<'input,IdentContextExt<'input>>; + +pub trait IdentContextAttrs<'input>: SubstraitTypeParserContext<'input>{ + /// Retrieves first TerminalNode corresponding to token Identifier + /// Returns `None` if there is no child corresponding to token Identifier + fn Identifier(&self) -> Option>> where Self:Sized{ + self.get_token(Identifier, 0) + } +} + +impl<'input> IdentContextAttrs<'input> for IdentContext<'input>{} + +pub struct IdentContextExt<'input>{ + base:IdentifierOrStringContextExt<'input>, + ph:PhantomData<&'input str> +} + +antlr_rust::tid!{IdentContextExt<'a>} + +impl<'input> SubstraitTypeParserContext<'input> for IdentContext<'input>{} + +impl<'input,'a> Listenable + 'a> for IdentContext<'input>{ + fn enter(&self,listener: &mut (dyn SubstraitTypeListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_Ident(self); + } +} + +impl<'input> CustomRuleContext<'input> for IdentContextExt<'input>{ + type TF = LocalTokenFactory<'input>; + type Ctx = SubstraitTypeParserContextType; + fn 
get_rule_index(&self) -> usize { RULE_identifierOrString } + //fn type_rule_index() -> usize where Self: Sized { RULE_identifierOrString } +} + +impl<'input> Borrow> for IdentContext<'input>{ + fn borrow(&self) -> &IdentifierOrStringContextExt<'input> { &self.base } +} +impl<'input> BorrowMut> for IdentContext<'input>{ + fn borrow_mut(&mut self) -> &mut IdentifierOrStringContextExt<'input> { &mut self.base } +} + +impl<'input> IdentifierOrStringContextAttrs<'input> for IdentContext<'input> {} + +impl<'input> IdentContextExt<'input>{ + fn new(ctx: &dyn IdentifierOrStringContextAttrs<'input>) -> Rc> { + Rc::new( + IdentifierOrStringContextAll::IdentContext( + BaseParserRuleContext::copy_from(ctx,IdentContextExt{ + base: ctx.borrow().clone(), + ph:PhantomData + }) + ) + ) + } +} + +impl<'input, I, H> SubstraitTypeParser<'input, I, H> +where + I: TokenStream<'input, TF = LocalTokenFactory<'input> > + TidAble<'input>, + H: ErrorStrategy<'input,BaseParserType<'input,I>> +{ + pub fn identifierOrString(&mut self,) + -> Result>,ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = IdentifierOrStringContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog.base.enter_rule(_localctx.clone(), 54, RULE_identifierOrString); + let mut _localctx: Rc = _localctx; + let result: Result<(), ANTLRError> = (|| { + + recog.base.set_state(344); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + String + => { + let tmp = StrContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 1); + _localctx = tmp; + { + recog.base.set_state(342); + recog.base.match_token(String,&mut recog.err_handler)?; + + } + } + + Identifier + => { + let tmp = IdentContextExt::new(&**_localctx); + recog.base.enter_outer_alt(Some(tmp.clone()), 2); + _localctx = tmp; + { + recog.base.set_state(343); + recog.base.match_token(Identifier,&mut recog.err_handler)?; + + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? + } + Ok(()) + })(); + match result { + Ok(_)=>{}, + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } +} + +lazy_static! 
{ + static ref _ATN: Arc = + Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); + static ref _decision_to_DFA: Arc>> = { + let mut dfa = Vec::new(); + let size = _ATN.decision_to_state.len(); + for i in 0..size { + dfa.push(DFA::new( + _ATN.clone(), + _ATN.get_decision_state(i), + i as isize, + ).into()) + } + Arc::new(dfa) + }; +} + + + +const _serializedATN:&'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ + \x32\u{15d}\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x04\x05\x09\ + \x05\x04\x06\x09\x06\x04\x07\x09\x07\x04\x08\x09\x08\x04\x09\x09\x09\x04\ + \x0a\x09\x0a\x04\x0b\x09\x0b\x04\x0c\x09\x0c\x04\x0d\x09\x0d\x04\x0e\x09\ + \x0e\x04\x0f\x09\x0f\x04\x10\x09\x10\x04\x11\x09\x11\x04\x12\x09\x12\x04\ + \x13\x09\x13\x04\x14\x09\x14\x04\x15\x09\x15\x04\x16\x09\x16\x04\x17\x09\ + \x17\x04\x18\x09\x18\x04\x19\x09\x19\x04\x1a\x09\x1a\x04\x1b\x09\x1b\x04\ + \x1c\x09\x1c\x04\x1d\x09\x1d\x03\x02\x07\x02\x3c\x0a\x02\x0c\x02\x0e\x02\ + \x3f\x0b\x02\x03\x02\x07\x02\x42\x0a\x02\x0c\x02\x0e\x02\x45\x0b\x02\x03\ + \x02\x03\x02\x07\x02\x49\x0a\x02\x0c\x02\x0e\x02\x4c\x0b\x02\x03\x02\x03\ + \x02\x03\x03\x07\x03\x51\x0a\x03\x0c\x03\x0e\x03\x54\x0b\x03\x03\x03\x07\ + \x03\x57\x0a\x03\x0c\x03\x0e\x03\x5a\x0b\x03\x03\x03\x03\x03\x07\x03\x5e\ + \x0a\x03\x0c\x03\x0e\x03\x61\x0b\x03\x03\x03\x03\x03\x03\x04\x03\x04\x03\ + \x04\x07\x04\x68\x0a\x04\x0c\x04\x0e\x04\x6b\x0b\x04\x03\x04\x03\x04\x03\ + \x05\x07\x05\x70\x0a\x05\x0c\x05\x0e\x05\x73\x0b\x05\x03\x05\x03\x05\x03\ + \x05\x07\x05\x78\x0a\x05\x0c\x05\x0e\x05\x7b\x0b\x05\x05\x05\x7d\x0a\x05\ + \x03\x06\x03\x06\x03\x06\x03\x06\x03\x06\x03\x06\x03\x06\x03\x06\x03\x06\ + \x03\x06\x03\x06\x05\x06\u{8a}\x0a\x06\x03\x07\x03\x07\x03\x08\x03\x08\x03\ + \x08\x03\x08\x07\x08\u{92}\x0a\x08\x0c\x08\x0e\x08\u{95}\x0b\x08\x03\x09\ + \x03\x09\x03\x0a\x03\x0a\x03\x0a\x03\x0a\x07\x0a\u{9d}\x0a\x0a\x0c\x0a\x0e\ + \x0a\u{a0}\x0b\x0a\x03\x0b\x03\x0b\x03\x0c\x03\x0c\x03\x0c\x03\x0c\x07\x0c\ + \u{a8}\x0a\x0c\x0c\x0c\x0e\x0c\u{ab}\x0b\x0c\x03\x0d\x03\x0d\x05\x0d\u{af}\ + \x0a\x0d\x03\x0e\x03\x0e\x03\x0e\x03\x0e\x07\x0e\u{b5}\x0a\x0e\x0c\x0e\x0e\ + \x0e\u{b8}\x0b\x0e\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x05\x0f\u{be}\x0a\x0f\ + \x03\x10\x03\x10\x03\x10\x03\x10\x07\x10\u{c4}\x0a\x10\x0c\x10\x0e\x10\u{c7}\ + \x0b\x10\x03\x11\x03\x11\x05\x11\u{cb}\x0a\x11\x03\x12\x03\x12\x03\x12\x03\ + \x12\x07\x12\u{d1}\x0a\x12\x0c\x12\x0e\x12\u{d4}\x0b\x12\x03\x13\x03\x13\ + \x05\x13\u{d8}\x0a\x13\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\ + \x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\ + \x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\ + \x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x07\ + \x14\u{fb}\x0a\x14\x0c\x14\x0e\x14\u{fe}\x0b\x14\x03\x14\x03\x14\x03\x14\ + \x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x07\x14\u{109}\x0a\x14\ + \x0c\x14\x0e\x14\u{10c}\x0b\x14\x05\x14\u{10e}\x0a\x14\x03\x14\x03\x14\x03\ + \x14\x05\x14\u{113}\x0a\x14\x03\x14\x05\x14\u{116}\x0a\x14\x03\x14\x05\x14\ + \u{119}\x0a\x14\x03\x14\x03\x14\x03\x14\x05\x14\u{11e}\x0a\x14\x03\x14\x03\ + \x14\x05\x14\u{122}\x0a\x14\x03\x15\x03\x15\x03\x15\x03\x15\x05\x15\u{128}\ + \x0a\x15\x03\x16\x03\x16\x03\x16\x03\x16\x03\x17\x03\x17\x03\x17\x05\x17\ + \u{131}\x0a\x17\x03\x18\x03\x18\x03\x18\x03\x18\x07\x18\u{137}\x0a\x18\x0c\ + \x18\x0e\x18\u{13a}\x0b\x18\x05\x18\u{13c}\x0a\x18\x03\x18\x03\x18\x03\x19\ + \x03\x19\x03\x19\x05\x19\u{143}\x0a\x19\x03\x19\x03\x19\x03\x1a\x03\x1a\ + 
\x05\x1a\u{149}\x0a\x1a\x03\x1b\x05\x1b\u{14c}\x0a\x1b\x03\x1b\x03\x1b\x03\ + \x1c\x03\x1c\x07\x1c\u{152}\x0a\x1c\x0c\x1c\x0e\x1c\u{155}\x0b\x1c\x03\x1c\ + \x03\x1c\x03\x1d\x03\x1d\x05\x1d\u{15b}\x0a\x1d\x03\x1d\x02\x02\x1e\x02\ + \x04\x06\x08\x0a\x0c\x0e\x10\x12\x14\x16\x18\x1a\x1c\x1e\x20\x22\x24\x26\ + \x28\x2a\x2c\x2e\x30\x32\x34\x36\x38\x02\x04\x03\x02\x2a\x2b\x03\x02\x2f\ + \x30\x02\u{17e}\x02\x3d\x03\x02\x02\x02\x04\x52\x03\x02\x02\x02\x06\x69\ + \x03\x02\x02\x02\x08\x71\x03\x02\x02\x02\x0a\u{89}\x03\x02\x02\x02\x0c\u{8b}\ + \x03\x02\x02\x02\x0e\u{8d}\x03\x02\x02\x02\x10\u{96}\x03\x02\x02\x02\x12\ + \u{98}\x03\x02\x02\x02\x14\u{a1}\x03\x02\x02\x02\x16\u{a3}\x03\x02\x02\x02\ + \x18\u{ae}\x03\x02\x02\x02\x1a\u{b0}\x03\x02\x02\x02\x1c\u{bd}\x03\x02\x02\ + \x02\x1e\u{bf}\x03\x02\x02\x02\x20\u{ca}\x03\x02\x02\x02\x22\u{cc}\x03\x02\ + \x02\x02\x24\u{d7}\x03\x02\x02\x02\x26\u{121}\x03\x02\x02\x02\x28\u{127}\ + \x03\x02\x02\x02\x2a\u{129}\x03\x02\x02\x02\x2c\u{130}\x03\x02\x02\x02\x2e\ + \u{132}\x03\x02\x02\x02\x30\u{142}\x03\x02\x02\x02\x32\u{148}\x03\x02\x02\ + \x02\x34\u{14b}\x03\x02\x02\x02\x36\u{153}\x03\x02\x02\x02\x38\u{15a}\x03\ + \x02\x02\x02\x3a\x3c\x07\x05\x02\x02\x3b\x3a\x03\x02\x02\x02\x3c\x3f\x03\ + \x02\x02\x02\x3d\x3b\x03\x02\x02\x02\x3d\x3e\x03\x02\x02\x02\x3e\x43\x03\ + \x02\x02\x02\x3f\x3d\x03\x02\x02\x02\x40\x42\x07\x06\x02\x02\x41\x40\x03\ + \x02\x02\x02\x42\x45\x03\x02\x02\x02\x43\x41\x03\x02\x02\x02\x43\x44\x03\ + \x02\x02\x02\x44\x46\x03\x02\x02\x02\x45\x43\x03\x02\x02\x02\x46\x4a\x05\ + \x0c\x07\x02\x47\x49\x07\x06\x02\x02\x48\x47\x03\x02\x02\x02\x49\x4c\x03\ + \x02\x02\x02\x4a\x48\x03\x02\x02\x02\x4a\x4b\x03\x02\x02\x02\x4b\x4d\x03\ + \x02\x02\x02\x4c\x4a\x03\x02\x02\x02\x4d\x4e\x07\x02\x02\x03\x4e\x03\x03\ + \x02\x02\x02\x4f\x51\x07\x05\x02\x02\x50\x4f\x03\x02\x02\x02\x51\x54\x03\ + \x02\x02\x02\x52\x50\x03\x02\x02\x02\x52\x53\x03\x02\x02\x02\x53\x58\x03\ + \x02\x02\x02\x54\x52\x03\x02\x02\x02\x55\x57\x07\x06\x02\x02\x56\x55\x03\ + \x02\x02\x02\x57\x5a\x03\x02\x02\x02\x58\x56\x03\x02\x02\x02\x58\x59\x03\ + \x02\x02\x02\x59\x5b\x03\x02\x02\x02\x5a\x58\x03\x02\x02\x02\x5b\x5f\x05\ + \x06\x04\x02\x5c\x5e\x07\x06\x02\x02\x5d\x5c\x03\x02\x02\x02\x5e\x61\x03\ + \x02\x02\x02\x5f\x5d\x03\x02\x02\x02\x5f\x60\x03\x02\x02\x02\x60\x62\x03\ + \x02\x02\x02\x61\x5f\x03\x02\x02\x02\x62\x63\x07\x02\x02\x03\x63\x05\x03\ + \x02\x02\x02\x64\x65\x05\x0a\x06\x02\x65\x66\x05\x08\x05\x02\x66\x68\x03\ + \x02\x02\x02\x67\x64\x03\x02\x02\x02\x68\x6b\x03\x02\x02\x02\x69\x67\x03\ + \x02\x02\x02\x69\x6a\x03\x02\x02\x02\x6a\x6c\x03\x02\x02\x02\x6b\x69\x03\ + \x02\x02\x02\x6c\x6d\x05\x0c\x07\x02\x6d\x07\x03\x02\x02\x02\x6e\x70\x07\ + \x06\x02\x02\x6f\x6e\x03\x02\x02\x02\x70\x73\x03\x02\x02\x02\x71\x6f\x03\ + \x02\x02\x02\x71\x72\x03\x02\x02\x02\x72\x7c\x03\x02\x02\x02\x73\x71\x03\ + \x02\x02\x02\x74\x7d\x07\x06\x02\x02\x75\x79\x07\x18\x02\x02\x76\x78\x07\ + \x06\x02\x02\x77\x76\x03\x02\x02\x02\x78\x7b\x03\x02\x02\x02\x79\x77\x03\ + \x02\x02\x02\x79\x7a\x03\x02\x02\x02\x7a\x7d\x03\x02\x02\x02\x7b\x79\x03\ + \x02\x02\x02\x7c\x74\x03\x02\x02\x02\x7c\x75\x03\x02\x02\x02\x7d\x09\x03\ + \x02\x02\x02\x7e\x7f\x05\x0c\x07\x02\x7f\u{80}\x07\x21\x02\x02\u{80}\u{81}\ + \x05\x0c\x07\x02\u{81}\u{8a}\x03\x02\x02\x02\u{82}\u{83}\x07\x08\x02\x02\ + \u{83}\u{84}\x05\x0c\x07\x02\u{84}\u{85}\x07\x09\x02\x02\u{85}\u{86}\x05\ + \x0c\x07\x02\u{86}\u{8a}\x03\x02\x02\x02\u{87}\u{88}\x07\x08\x02\x02\u{88}\ + \u{8a}\x05\x0c\x07\x02\u{89}\x7e\x03\x02\x02\x02\u{89}\u{82}\x03\x02\x02\ + 
\x02\u{89}\u{87}\x03\x02\x02\x02\u{8a}\x0b\x03\x02\x02\x02\u{8b}\u{8c}\x05\ + \x0e\x08\x02\u{8c}\x0d\x03\x02\x02\x02\u{8d}\u{93}\x05\x12\x0a\x02\u{8e}\ + \u{8f}\x05\x10\x09\x02\u{8f}\u{90}\x05\x12\x0a\x02\u{90}\u{92}\x03\x02\x02\ + \x02\u{91}\u{8e}\x03\x02\x02\x02\u{92}\u{95}\x03\x02\x02\x02\u{93}\u{91}\ + \x03\x02\x02\x02\u{93}\u{94}\x03\x02\x02\x02\u{94}\x0f\x03\x02\x02\x02\u{95}\ + \u{93}\x03\x02\x02\x02\u{96}\u{97}\x07\x22\x02\x02\u{97}\x11\x03\x02\x02\ + \x02\u{98}\u{9e}\x05\x16\x0c\x02\u{99}\u{9a}\x05\x14\x0b\x02\u{9a}\u{9b}\ + \x05\x16\x0c\x02\u{9b}\u{9d}\x03\x02\x02\x02\u{9c}\u{99}\x03\x02\x02\x02\ + \u{9d}\u{a0}\x03\x02\x02\x02\u{9e}\u{9c}\x03\x02\x02\x02\u{9e}\u{9f}\x03\ + \x02\x02\x02\u{9f}\x13\x03\x02\x02\x02\u{a0}\u{9e}\x03\x02\x02\x02\u{a1}\ + \u{a2}\x07\x23\x02\x02\u{a2}\x15\x03\x02\x02\x02\u{a3}\u{a9}\x05\x1a\x0e\ + \x02\u{a4}\u{a5}\x05\x18\x0d\x02\u{a5}\u{a6}\x05\x1a\x0e\x02\u{a6}\u{a8}\ + \x03\x02\x02\x02\u{a7}\u{a4}\x03\x02\x02\x02\u{a8}\u{ab}\x03\x02\x02\x02\ + \u{a9}\u{a7}\x03\x02\x02\x02\u{a9}\u{aa}\x03\x02\x02\x02\u{aa}\x17\x03\x02\ + \x02\x02\u{ab}\u{a9}\x03\x02\x02\x02\u{ac}\u{af}\x07\x24\x02\x02\u{ad}\u{af}\ + \x07\x25\x02\x02\u{ae}\u{ac}\x03\x02\x02\x02\u{ae}\u{ad}\x03\x02\x02\x02\ + \u{af}\x19\x03\x02\x02\x02\u{b0}\u{b6}\x05\x1e\x10\x02\u{b1}\u{b2}\x05\x1c\ + \x0f\x02\u{b2}\u{b3}\x05\x1e\x10\x02\u{b3}\u{b5}\x03\x02\x02\x02\u{b4}\u{b1}\ + \x03\x02\x02\x02\u{b5}\u{b8}\x03\x02\x02\x02\u{b6}\u{b4}\x03\x02\x02\x02\ + \u{b6}\u{b7}\x03\x02\x02\x02\u{b7}\x1b\x03\x02\x02\x02\u{b8}\u{b6}\x03\x02\ + \x02\x02\u{b9}\u{be}\x07\x26\x02\x02\u{ba}\u{be}\x07\x27\x02\x02\u{bb}\u{be}\ + \x07\x28\x02\x02\u{bc}\u{be}\x07\x29\x02\x02\u{bd}\u{b9}\x03\x02\x02\x02\ + \u{bd}\u{ba}\x03\x02\x02\x02\u{bd}\u{bb}\x03\x02\x02\x02\u{bd}\u{bc}\x03\ + \x02\x02\x02\u{be}\x1d\x03\x02\x02\x02\u{bf}\u{c5}\x05\x22\x12\x02\u{c0}\ + \u{c1}\x05\x20\x11\x02\u{c1}\u{c2}\x05\x22\x12\x02\u{c2}\u{c4}\x03\x02\x02\ + \x02\u{c3}\u{c0}\x03\x02\x02\x02\u{c4}\u{c7}\x03\x02\x02\x02\u{c5}\u{c3}\ + \x03\x02\x02\x02\u{c5}\u{c6}\x03\x02\x02\x02\u{c6}\x1f\x03\x02\x02\x02\u{c7}\ + \u{c5}\x03\x02\x02\x02\u{c8}\u{cb}\x07\x2a\x02\x02\u{c9}\u{cb}\x07\x2b\x02\ + \x02\u{ca}\u{c8}\x03\x02\x02\x02\u{ca}\u{c9}\x03\x02\x02\x02\u{cb}\x21\x03\ + \x02\x02\x02\u{cc}\u{d2}\x05\x26\x14\x02\u{cd}\u{ce}\x05\x24\x13\x02\u{ce}\ + \u{cf}\x05\x26\x14\x02\u{cf}\u{d1}\x03\x02\x02\x02\u{d0}\u{cd}\x03\x02\x02\ + \x02\u{d1}\u{d4}\x03\x02\x02\x02\u{d2}\u{d0}\x03\x02\x02\x02\u{d2}\u{d3}\ + \x03\x02\x02\x02\u{d3}\x23\x03\x02\x02\x02\u{d4}\u{d2}\x03\x02\x02\x02\u{d5}\ + \u{d8}\x07\x2c\x02\x02\u{d6}\u{d8}\x07\x2d\x02\x02\u{d7}\u{d5}\x03\x02\x02\ + \x02\u{d7}\u{d6}\x03\x02\x02\x02\u{d8}\x25\x03\x02\x02\x02\u{d9}\u{da}\x07\ + \x1b\x02\x02\u{da}\u{db}\x05\x0c\x07\x02\u{db}\u{dc}\x07\x1c\x02\x02\u{dc}\ + \u{122}\x03\x02\x02\x02\u{dd}\u{de}\x07\x0a\x02\x02\u{de}\u{df}\x05\x0c\ + \x07\x02\u{df}\u{e0}\x07\x0b\x02\x02\u{e0}\u{e1}\x05\x0c\x07\x02\u{e1}\u{e2}\ + \x07\x0c\x02\x02\u{e2}\u{e3}\x05\x0c\x07\x02\u{e3}\u{122}\x03\x02\x02\x02\ + \u{e4}\u{e5}\x07\x1a\x02\x02\u{e5}\u{122}\x05\x0c\x07\x02\u{e6}\u{122}\x07\ + \x19\x02\x02\u{e7}\u{122}\x07\x10\x02\x02\u{e8}\u{122}\x07\x0e\x02\x02\u{e9}\ + \u{122}\x07\x0f\x02\x02\u{ea}\u{122}\x07\x11\x02\x02\u{eb}\u{ec}\x05\x34\ + \x1b\x02\u{ec}\u{ed}\x07\x2e\x02\x02\u{ed}\u{ee}\x05\x34\x1b\x02\u{ee}\u{122}\ + \x03\x02\x02\x02\u{ef}\u{f0}\x05\x34\x1b\x02\u{f0}\u{f1}\x07\x2e\x02\x02\ + \u{f1}\u{122}\x03\x02\x02\x02\u{f2}\u{f3}\x07\x2e\x02\x02\u{f3}\u{122}\x05\ + 
\x34\x1b\x02\u{f4}\u{122}\x05\x34\x1b\x02\u{f5}\u{122}\x07\x12\x02\x02\u{f6}\ + \u{f7}\x07\x1d\x02\x02\u{f7}\u{fc}\x07\x32\x02\x02\u{f8}\u{f9}\x07\x16\x02\ + \x02\u{f9}\u{fb}\x07\x32\x02\x02\u{fa}\u{f8}\x03\x02\x02\x02\u{fb}\u{fe}\ + \x03\x02\x02\x02\u{fc}\u{fa}\x03\x02\x02\x02\u{fc}\u{fd}\x03\x02\x02\x02\ + \u{fd}\u{ff}\x03\x02\x02\x02\u{fe}\u{fc}\x03\x02\x02\x02\u{ff}\u{122}\x07\ + \x1e\x02\x02\u{100}\u{122}\x07\x13\x02\x02\u{101}\u{122}\x07\x31\x02\x02\ + \u{102}\u{122}\x07\x14\x02\x02\u{103}\u{104}\x07\x32\x02\x02\u{104}\u{10d}\ + \x07\x1b\x02\x02\u{105}\u{10a}\x05\x0c\x07\x02\u{106}\u{107}\x07\x16\x02\ + \x02\u{107}\u{109}\x05\x0c\x07\x02\u{108}\u{106}\x03\x02\x02\x02\u{109}\ + \u{10c}\x03\x02\x02\x02\u{10a}\u{108}\x03\x02\x02\x02\u{10a}\u{10b}\x03\ + \x02\x02\x02\u{10b}\u{10e}\x03\x02\x02\x02\u{10c}\u{10a}\x03\x02\x02\x02\ + \u{10d}\u{105}\x03\x02\x02\x02\u{10d}\u{10e}\x03\x02\x02\x02\u{10e}\u{10f}\ + \x03\x02\x02\x02\u{10f}\u{122}\x07\x1c\x02\x02\u{110}\u{112}\x05\x36\x1c\ + \x02\u{111}\u{113}\x05\x28\x15\x02\u{112}\u{111}\x03\x02\x02\x02\u{112}\ + \u{113}\x03\x02\x02\x02\u{113}\u{115}\x03\x02\x02\x02\u{114}\u{116}\x05\ + \x2a\x16\x02\u{115}\u{114}\x03\x02\x02\x02\u{115}\u{116}\x03\x02\x02\x02\ + \u{116}\u{118}\x03\x02\x02\x02\u{117}\u{119}\x05\x2e\x18\x02\u{118}\u{117}\ + \x03\x02\x02\x02\u{118}\u{119}\x03\x02\x02\x02\u{119}\u{122}\x03\x02\x02\ + \x02\u{11a}\u{11b}\x07\x19\x02\x02\u{11b}\u{11d}\x07\x32\x02\x02\u{11c}\ + \u{11e}\x05\x28\x15\x02\u{11d}\u{11c}\x03\x02\x02\x02\u{11d}\u{11e}\x03\ + \x02\x02\x02\u{11e}\u{122}\x03\x02\x02\x02\u{11f}\u{120}\x07\x2b\x02\x02\ + \u{120}\u{122}\x05\x0c\x07\x02\u{121}\u{d9}\x03\x02\x02\x02\u{121}\u{dd}\ + \x03\x02\x02\x02\u{121}\u{e4}\x03\x02\x02\x02\u{121}\u{e6}\x03\x02\x02\x02\ + \u{121}\u{e7}\x03\x02\x02\x02\u{121}\u{e8}\x03\x02\x02\x02\u{121}\u{e9}\ + \x03\x02\x02\x02\u{121}\u{ea}\x03\x02\x02\x02\u{121}\u{eb}\x03\x02\x02\x02\ + \u{121}\u{ef}\x03\x02\x02\x02\u{121}\u{f2}\x03\x02\x02\x02\u{121}\u{f4}\ + \x03\x02\x02\x02\u{121}\u{f5}\x03\x02\x02\x02\u{121}\u{f6}\x03\x02\x02\x02\ + \u{121}\u{100}\x03\x02\x02\x02\u{121}\u{101}\x03\x02\x02\x02\u{121}\u{102}\ + \x03\x02\x02\x02\u{121}\u{103}\x03\x02\x02\x02\u{121}\u{110}\x03\x02\x02\ + \x02\u{121}\u{11a}\x03\x02\x02\x02\u{121}\u{11f}\x03\x02\x02\x02\u{122}\ + \x27\x03\x02\x02\x02\u{123}\u{128}\x07\x1a\x02\x02\u{124}\u{128}\x07\x19\ + \x02\x02\u{125}\u{126}\x07\x19\x02\x02\u{126}\u{128}\x05\x0c\x07\x02\u{127}\ + \u{123}\x03\x02\x02\x02\u{127}\u{124}\x03\x02\x02\x02\u{127}\u{125}\x03\ + \x02\x02\x02\u{128}\x29\x03\x02\x02\x02\u{129}\u{12a}\x07\x1f\x02\x02\u{12a}\ + \u{12b}\x05\x2c\x17\x02\u{12b}\u{12c}\x07\x20\x02\x02\u{12c}\x2b\x03\x02\ + \x02\x02\u{12d}\u{131}\x07\x19\x02\x02\u{12e}\u{131}\x07\x30\x02\x02\u{12f}\ + \u{131}\x05\x36\x1c\x02\u{130}\u{12d}\x03\x02\x02\x02\u{130}\u{12e}\x03\ + \x02\x02\x02\u{130}\u{12f}\x03\x02\x02\x02\u{131}\x2d\x03\x02\x02\x02\u{132}\ + \u{13b}\x07\x26\x02\x02\u{133}\u{138}\x05\x30\x19\x02\u{134}\u{135}\x07\ + \x16\x02\x02\u{135}\u{137}\x05\x30\x19\x02\u{136}\u{134}\x03\x02\x02\x02\ + \u{137}\u{13a}\x03\x02\x02\x02\u{138}\u{136}\x03\x02\x02\x02\u{138}\u{139}\ + \x03\x02\x02\x02\u{139}\u{13c}\x03\x02\x02\x02\u{13a}\u{138}\x03\x02\x02\ + \x02\u{13b}\u{133}\x03\x02\x02\x02\u{13b}\u{13c}\x03\x02\x02\x02\u{13c}\ + \u{13d}\x03\x02\x02\x02\u{13d}\u{13e}\x07\x28\x02\x02\u{13e}\x2f\x03\x02\ + \x02\x02\u{13f}\u{140}\x05\x38\x1d\x02\u{140}\u{141}\x07\x17\x02\x02\u{141}\ + \u{143}\x03\x02\x02\x02\u{142}\u{13f}\x03\x02\x02\x02\u{142}\u{143}\x03\ + 
\x02\x02\x02\u{143}\u{144}\x03\x02\x02\x02\u{144}\u{145}\x05\x32\x1a\x02\ + \u{145}\x31\x03\x02\x02\x02\u{146}\u{149}\x07\x0d\x02\x02\u{147}\u{149}\ + \x05\x0c\x07\x02\u{148}\u{146}\x03\x02\x02\x02\u{148}\u{147}\x03\x02\x02\ + \x02\u{149}\x33\x03\x02\x02\x02\u{14a}\u{14c}\x09\x02\x02\x02\u{14b}\u{14a}\ + \x03\x02\x02\x02\u{14b}\u{14c}\x03\x02\x02\x02\u{14c}\u{14d}\x03\x02\x02\ + \x02\u{14d}\u{14e}\x09\x03\x02\x02\u{14e}\x35\x03\x02\x02\x02\u{14f}\u{150}\ + \x07\x32\x02\x02\u{150}\u{152}\x07\x15\x02\x02\u{151}\u{14f}\x03\x02\x02\ + \x02\u{152}\u{155}\x03\x02\x02\x02\u{153}\u{151}\x03\x02\x02\x02\u{153}\ + \u{154}\x03\x02\x02\x02\u{154}\u{156}\x03\x02\x02\x02\u{155}\u{153}\x03\ + \x02\x02\x02\u{156}\u{157}\x07\x32\x02\x02\u{157}\x37\x03\x02\x02\x02\u{158}\ + \u{15b}\x07\x31\x02\x02\u{159}\u{15b}\x07\x32\x02\x02\u{15a}\u{158}\x03\ + \x02\x02\x02\u{15a}\u{159}\x03\x02\x02\x02\u{15b}\x39\x03\x02\x02\x02\x28\ + \x3d\x43\x4a\x52\x58\x5f\x69\x71\x79\x7c\u{89}\u{93}\u{9e}\u{a9}\u{ae}\u{b6}\ + \u{bd}\u{c5}\u{ca}\u{d2}\u{d7}\u{fc}\u{10a}\u{10d}\u{112}\u{115}\u{118}\ + \u{11d}\u{121}\u{127}\u{130}\u{138}\u{13b}\u{142}\u{148}\u{14b}\u{153}\u{15a}"; + diff --git a/rs/src/parse/extensions/simple/mod.rs b/rs/src/parse/extensions/simple/mod.rs index d08085a0..a2d8fa37 100644 --- a/rs/src/parse/extensions/simple/mod.rs +++ b/rs/src/parse/extensions/simple/mod.rs @@ -6,10 +6,12 @@ use crate::input::proto::substrait; use crate::output::diagnostic::Result; use crate::output::extension; +use crate::output::extension::simple::module::Scope; use crate::output::type_system::data; use crate::parse::context; mod builder; +mod derivations; mod function_decls; mod type_decls; mod type_variation_decls; diff --git a/rs/src/parse/traversal.rs b/rs/src/parse/traversal.rs index 59f88f60..6f266cbe 100644 --- a/rs/src/parse/traversal.rs +++ b/rs/src/parse/traversal.rs @@ -1262,3 +1262,203 @@ pub fn read_yaml( Some(json_data) } + +//============================================================================= +// ANTLR syntax tree node handling +//============================================================================= + +/// Wrapper type to satisfy push_child()'s InputNode trait bound on the input +/// node type. +struct AntlrContextWrapper<'a, T>(&'a T); + +impl<'a, T> InputNode for AntlrContextWrapper<'a, T> { + fn type_to_node() -> tree::Node { + tree::NodeType::AstNode.into() + } + + fn data_to_node(&self) -> tree::Node { + tree::NodeType::AstNode.into() + } + + fn oneof_variant(&self) -> Option<&'static str> { + None + } + + fn parse_unknown(&self, _: &mut context::Context<'_>) -> bool { + false + } +} + +/// Convenience/shorthand macro for traversing into a syntax tree node by node. +macro_rules! antlr_child { + ($input:expr, $context:expr, $field:ident, $analyzer:expr) => { + antlr_child!($input, $context, $field, 0, $analyzer) + }; + ($input:expr, $context:expr, $field:ident, $index:expr, $analyzer:expr) => { + crate::parse::traversal::push_antlr_child( + $context, + $input, + $index, + stringify!($field), + $analyzer, + ) + }; + ($input:expr, $context:expr, $field:ident, $index:expr, $analyzer:expr, $($args:expr),*) => { + antlr_child!($input, $context, $field, $index, |x, y| $analyzer(x, y, $($args),*)) + }; +} + +/// Parse and push a child of an ANTLR syntax tree node. 
+pub fn push_antlr_child<'input, TP, TC, TR, FA>(
+    context: &mut context::Context,
+    parent: &TP,
+    index: usize,
+    field: &'static str,
+    analyzer: FA,
+) -> OptionalResult<TR>
+where
+    TP: antlr_rust::parser_rule_context::ParserRuleContext<'input>,
+    FA: FnOnce(&TC, &mut context::Context) -> diagnostic::Result<TR>,
+    TC: antlr_rust::parser_rule_context::ParserRuleContext<'input, TF = TP::TF, Ctx = TP::Ctx>
+        + 'input,
+{
+    if let Some(child) = parent.child_of_type::<TC>(index) {
+        let (field_output, result) = push_child(
+            context,
+            &AntlrContextWrapper(child.as_ref()),
+            path::PathElement::Field(field.to_string()),
+            false,
+            |x: &AntlrContextWrapper<TC>, y| analyzer(x.0, y),
+        );
+        (Some(field_output), result)
+    } else {
+        (None, None)
+    }
+}
+
+/// Convenience/shorthand macro for traversing into a syntax tree node by node.
+/// Contrary to antlr_child! and most other traversal macros, this does NOT
+/// make a child node in the resulting tree. It can be used to hide unobvious
+/// grammar constructs, such as rules related to avoiding left recursion.
+macro_rules! antlr_hidden_child {
+    ($input:expr, $context:expr, $analyzer:expr) => {
+        antlr_hidden_child!($input, $context, 0, $analyzer)
+    };
+    ($input:expr, $context:expr, $index:expr, $analyzer:expr) => {
+        crate::parse::traversal::push_antlr_hidden_child(
+            $context,
+            $input,
+            $index,
+            $analyzer,
+        )
+    };
+    ($input:expr, $context:expr, $index:expr, $analyzer:expr, $($args:expr),*) => {
+        antlr_hidden_child!($input, $context, $index, |x, y| $analyzer(x, y, $($args),*))
+    };
+}
+
+/// Parse and push a child of an ANTLR syntax tree node, without making a
+/// corresponding child node in the output tree.
+pub fn push_antlr_hidden_child<'input, TP, TC, TR, FA>(
+    context: &mut context::Context,
+    parent: &TP,
+    index: usize,
+    analyzer: FA,
+) -> Option<TR>
+where
+    TP: antlr_rust::parser_rule_context::ParserRuleContext<'input>,
+    FA: FnOnce(&TC, &mut context::Context) -> diagnostic::Result<TR>,
+    TC: antlr_rust::parser_rule_context::ParserRuleContext<'input, TF = TP::TF, Ctx = TP::Ctx>
+        + 'input,
+{
+    parent.child_of_type::<TC>(index).and_then(|child| {
+        analyzer(child.as_ref(), context)
+            .map_err(|cause| {
+                diagnostic!(context, Error, cause);
+            })
+            .ok()
+    })
+}
+
+/// This does more or less the opposite of pushing a hidden child: it creates a
+/// child node in the output tree without traversing deeper into the input
+/// tree. It can be used to hide unobvious grammar constructs, such as rules
+/// related to avoiding left recursion.
+macro_rules! antlr_recurse {
+    ($input:expr, $context:expr, $field:ident, $analyzer:expr) => {
+        crate::parse::traversal::push_antlr_recurse(
+            $context,
+            $input,
+            stringify!($field),
+            $analyzer,
+        )
+    };
+    ($input:expr, $context:expr, $field:ident, $analyzer:expr, $($args:expr),*) => {
+        antlr_recurse!($input, $context, $field, |x, y| $analyzer(x, y, $($args),*))
+    };
+}
+
+/// Pushes a child node into the output tree for the given ANTLR syntax tree
+/// node, without traversing deeper into the input tree.
+pub fn push_antlr_recurse<'input, TP, TR, FA>(
+    context: &mut context::Context,
+    parent: &TP,
+    field: &'static str,
+    analyzer: FA,
+) -> RequiredResult<TR>
+where
+    TP: antlr_rust::parser_rule_context::ParserRuleContext<'input>,
+    FA: FnOnce(&TP, &mut context::Context) -> diagnostic::Result<TR>,
+{
+    push_child(
+        context,
+        &AntlrContextWrapper(parent),
+        path::PathElement::Field(field.to_string()),
+        false,
+        |x: &AntlrContextWrapper<TP>, y| analyzer(x.0, y),
+    )
+}
+
+/// Convenience/shorthand macro for traversing into all children of a certain
+/// type in a syntax tree.
+macro_rules! antlr_children {
+    ($input:expr, $context:expr, $rule:ident, $analyzer:expr) => {
+        crate::parse::traversal::push_antlr_children(
+            $context,
+            $input,
+            stringify!($rule),
+            $analyzer,
+        )
+    };
+    ($input:expr, $context:expr, $rule:ident, $analyzer:expr, $($args:expr),*) => {
+        antlr_children!($input, $context, $rule, |x, y| $analyzer(x, y, $($args),*))
+    };
+}
+
+/// Parse and push all children of a given type of an ANTLR syntax tree node.
+pub fn push_antlr_children<'input, TP, TC, TR, FA>(
+    context: &mut context::Context,
+    parent: &TP,
+    field: &'static str,
+    mut analyzer: FA,
+) -> RepeatedResult<TR>
+where
+    TP: antlr_rust::parser_rule_context::ParserRuleContext<'input>,
+    FA: FnMut(&TC, &mut context::Context) -> diagnostic::Result<TR>,
+    TC: antlr_rust::parser_rule_context::ParserRuleContext<'input, TF = TP::TF, Ctx = TP::Ctx>
+        + 'input,
+{
+    parent
+        .children_of_type::<TC>()
+        .into_iter()
+        .enumerate()
+        .map(|(index, child)| {
+            push_child(
+                context,
+                &AntlrContextWrapper(child.as_ref()),
+                path::PathElement::Repeated(field.to_string(), index),
+                false,
+                |x: &AntlrContextWrapper<TC>, y| analyzer(x.0, y),
+            )
+        })
+        .unzip()
+}
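
As a point of reference, the sketch below shows roughly how an analyzer could drive the traversal helpers added above; it is not part of the patch. The macro names and the analyzer shape (Fn(&Ctx, &mut context::Context) -> diagnostic::Result<T>) come from the rs/src/parse/traversal.rs hunk, while ExprContext, PatternContext, parse_pattern, and analyze_expression are hypothetical stand-ins, since the actual derivation analyzers are not included in this diff.

// Hypothetical analyzer for a generated ExprContext rule node.
fn analyze_expression(
    input: &ExprContext,                // hypothetical generated parser context
    context: &mut context::Context,
) -> diagnostic::Result<()> {
    // Every PatternContext child becomes a repeated `patterns` field in the
    // output tree; parse_pattern (hypothetical) runs once per matching child.
    let (_nodes, _results) = antlr_children!(input, context, patterns, parse_pattern);

    // A single child of the type inferred from the analyzer's first argument
    // can be traversed with antlr_child!; the short form uses child index 0.
    let (_node, _result) = antlr_child!(input, context, pattern, parse_pattern);

    Ok(())
}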